diff --git a/llvm/projects/gpu_profiler/.gitignore b/llvm/projects/gpu_profiler/.gitignore
deleted file mode 100644
index dd2c293453382269c150c372d926f287a74edea5..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-*.swp
-jetsonTX2Power
-pp
diff --git a/llvm/projects/gpu_profiler/CMakeLists.txt b/llvm/projects/gpu_profiler/CMakeLists.txt
deleted file mode 100644
index c6cf3041eee354609b3999e5a8dcd424990f75ec..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/CMakeLists.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-cmake_minimum_required(VERSION 3.5)
-set(libsrc src/profiler.cpp)
-set (CMAKE_CXX_STANDARD 11)
-add_library(gpu_profiler STATIC ${libsrc})
-target_include_directories(gpu_profiler PRIVATE include)
diff --git a/llvm/projects/gpu_profiler/Makefile b/llvm/projects/gpu_profiler/Makefile
deleted file mode 100644
index 412d38265ab5c9408d4ac444ded9d6bd8b72f1b7..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/Makefile
+++ /dev/null
@@ -1,5 +0,0 @@
-all:
-	g++ -std=c++11 -O3 profiler.cpp -o pp -lpthread
-
-clean:
-	rm -rf pp
diff --git a/llvm/projects/gpu_profiler/include/profiler.h b/llvm/projects/gpu_profiler/include/profiler.h
deleted file mode 100644
index 9468554833fa82fd3b3c8c2ab85a393e21e92c80..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/include/profiler.h
+++ /dev/null
@@ -1,124 +0,0 @@
-#include <atomic>
-#include <chrono>
-#include <cmath>
-#include <condition_variable>
-#include <fstream>
-#include <iostream>
-#include <mutex>
-#include <string>
-#include <thread>
-#include <vector>
-
-// Reads power rails at runtime and computes the GPU and DDR energy within a window
-// of time, which is delimited by the calls to resume_profiler() and pause_profiler().
-//
-// IMPORTANT: Must call stop_profiler() to kill the profiler thread.
-//
-// Public interface methods:
-//      void start_profiler();
-//      void resume_profiler();
-//      void pause_profiler();
-//      std::pair<double, double> get_time_energy() const;
-//      void reset();
-//      void stop_profiler();
-class Profiler {
-public:
-    using clock_type = std::chrono::high_resolution_clock;
-
-    Profiler();
-
-    ~Profiler();
-
-    // Reinitializes the boolean flags used for control flow and launches the
-    // profiler thread. DOES NOT reset other internal data structures.
-    void start_profiler();
-
-    // Resumes profiling of whatever executable is currently running.
-    // DOES NOT reset any data.
-    void resume_profiler();
-
-    // Pauses the profiler by putting the profiler thread to sleep.
-    void pause_profiler();
-
-    // Gets the delta time and total GPU and DDR energy between the last two
-    // calls to resume_profiler() and pause_profiler().
-    //
-    // Returns this as a pair of <delta time in milliseconds, energy>
-    std::pair<double, double> get_time_energy() const;
-
-    // Resets all internal data structures, including the power_readings_ vector.
-    void reset();
-
-    // Exits the profiler and kills the profiler thread.
-    // Must call start_profiler() to reuse this object after calling stop_profiler().
-    void stop_profiler();
-
-private:
-    // Jetson's ARM cores' physical IDs. The two Denver cores are 1 and 2, and
-    // we can't use them.
-    const unsigned core0 = 0;
-    const unsigned core1 = 3;
-    const unsigned core2 = 4;
-    const unsigned core3 = 5;
-
-    // Power rails are exposed as sysfs files. Keeping the old power rail file
-    // names for possible future integrations.
-    const std::string cpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power1_input";
-    const std::string gpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power0_input";
-    const std::string ddr_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power2_input";
-    const std::string soc_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power1_input";
-    const std::string sys_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power0_input";
-    // Critical assumption: If this file doesn't exist, then the board isn't a Jetson
-    const std::string jetson_chip_id = "/sys/module/tegra_fuse/parameters/tegra_chip_id";
-
-    // True if running on Jetson, else false
-    bool on_jetson_; 
-
-    // An individual power reading
-    struct PowerReading {
-        std::chrono::time_point<clock_type> time_;
-        double cpu_;
-        double gpu_;
-        double ddr_;
-        double soc_;
-        double sys_;
-    };
-
-    // Stores all power readings and is cleared only when reset() is called
-    std::vector<PowerReading> power_readings_;
-
-    // For reading the i2c buses via sysfs
-    std::ifstream cpu_stream_;
-    std::ifstream gpu_stream_;
-    std::ifstream ddr_stream_;
-    std::ifstream soc_stream_;
-    std::ifstream sys_stream_;
-
-    double run_profiler_overhead = 0.0;
-
-    mutable std::mutex vector_mutex_;
-
-    std::mutex mutex_;
-    
-    std::condition_variable cond_var_;
-
-    std::chrono::time_point<clock_type> start_time_;
-
-    std::chrono::time_point<clock_type> stop_time_;
-
-    std::atomic_bool should_run_profiler_; // True if we want to resume the profiling thread
-
-    std::atomic_bool should_stop_profiler_; // Quit profiling
-
-    std::thread profiler_thread_;
-
-    // Obtains a single power reading from the GPU and DDR rails
-    void obtain_power_reading();
-
-    // Pins the given thread to the specified core
-    void pin_thread(std::thread &t, const unsigned core) const;
-
-    // Runs the profiler thread, keeping it alive by wrapping the functionality
-    // in an infinite loop 
-    void run_profiler();
-};
diff --git a/llvm/projects/gpu_profiler/offline_profiler.cpp b/llvm/projects/gpu_profiler/offline_profiler.cpp
deleted file mode 100644
index 6b9f37ef62cc2c8600d11474100f27873bc36d7a..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/offline_profiler.cpp
+++ /dev/null
@@ -1,595 +0,0 @@
-#include <cmath>
-#include <chrono>
-
-#include <iostream>
-#include <fstream>
-#include <string>
-#include <boost/algorithm/string.hpp>
-
-#include <vector>
-#include <map>
-
-#include <thread>
-#include <atomic>
-#include <sched.h>
-
-#define NUM_ARGS 4
-
-// This is a simple power profiler that can sample the power of the various
-// components in a Jetson TX2. The usage is simple: profile() measures power
-// for the specified program, and then dumpTensorInfo() and dumpPowerReadings()
-// write the results to files. profile() can be called as many times as desired;
-// the internal state is reset each time, so the measurements are not cumulative.
-class Profiler {
-private:
-    // Jetson's ARM cores' physical IDs. The two Denver cores are 1 and 2, and
-    // we can't use them.
-    const unsigned core0 = 0;
-    const unsigned core1 = 3;
-    const unsigned core2 = 4;
-    const unsigned core3 = 5;
-
-    // sysfs paths for i2c buses of various components
-    const char * const cpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power1_input";
-    const char * const gpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power0_input";
-    const char * const ddr_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power2_input";
-    const char * const soc_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power1_input";
-    const char * const sys_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power0_input";
-
-    // It takes some time for the GPU's power to return to idle (ms)
-    const unsigned gpu_idle_time = 0;
-
-    // An individual power reading
-    struct PowerReading {
-        std::chrono::time_point<std::chrono::high_resolution_clock> time_;
-        double cpu_;
-        double gpu_;
-        double ddr_;
-        double soc_;
-        double sys_;
-    };
-
-    // Individual tensor op
-    struct TensorOp {
-        std::string name_;
-
-        double start_;
-        double finish_;
-        double time_;
-
-        double energy_;
-        double gpu_energy_;
-        double ddr_energy_;
-
-        double power_;
-        double gpu_power_;
-        double ddr_power_;
-
-        TensorOp(std::string name, double start, double finish)
-            : name_(name), start_(start), finish_(finish), time_(finish - start),
-            energy_(0.0), gpu_energy_(0.0), ddr_energy_(0.0),
-            power_(0.0), gpu_power_(0.0), ddr_power_(0.0) {
-        }
-    };
-
-    // Aggregate tensor info
-    struct AggTensorInfo {
-        // Op name
-        std::string name_;
-
-        // Averages
-        double average_time_;
-
-        double average_energy_;
-        double average_gpu_energy_;
-        double average_ddr_energy_;
-
-        double average_power_;
-        double average_gpu_power_;
-        double average_ddr_power_;
-
-        // Standard deviations
-        double time_std_;
-
-        double energy_std_;
-        double gpu_energy_std_;
-        double ddr_energy_std_;
-
-        double power_std_;
-        double gpu_power_std_;
-        double ddr_power_std_;
-    };
-
-    // Total time, energy, and power
-    struct TotalInfo {
-        double time_;
-
-        double energy_;
-        double gpu_energy_;
-        double ddr_energy_;
-
-        double power_;
-        double gpu_power_;
-        double ddr_power_;
-
-        void clear() {
-            time_ = 0.0;
-
-            energy_ = 0.0;
-            gpu_energy_ = 0.0;
-            ddr_energy_ = 0.0;
-
-            power_ = 0.0;
-            gpu_power_ = 0.0;
-            ddr_power_ = 0.0;
-        }
-    };
-
-    // For reading the i2c buses via sysfs
-    std::ifstream cpu_stream_;
-    std::ifstream gpu_stream_;
-    std::ifstream ddr_stream_;
-    std::ifstream soc_stream_;
-    std::ifstream sys_stream_;
-
-    // Start time (so graph begins from t=0)
-    std::chrono::time_point<std::chrono::high_resolution_clock> start_time_;
-
-    // Per-run info
-    std::vector<PowerReading> power_readings_;
-
-    // Aggregate (across all runs) info
-    std::map<std::string, std::vector<TensorOp>> tensor_info_;
-    std::vector<AggTensorInfo> agg_tensor_info_;
-    TotalInfo total_info_;
-    unsigned iterations_;
-
-    // Start and stop flags to synchronize the program and profiling threads
-    std::atomic_bool start_;
-    std::atomic_bool stop_;
-
-private:
-    // Resets tensor info and total time and energy
-    void resetGlobal() {
-        tensor_info_.clear();
-        agg_tensor_info_.clear();
-        total_info_.clear();
-    }
-
-    // Resets power readings and flags
-    void resetLocal() {
-        power_readings_.clear();
-        start_ = false;
-        stop_ = false;
-    }
-
-    // Pins the given thread to the specified core
-    void pinThread(std::thread &t, const unsigned core) const {
-        cpu_set_t cpuset;
-        CPU_ZERO(&cpuset);
-        CPU_SET(core, &cpuset);
-        if (pthread_setaffinity_np(t.native_handle(), sizeof(cpu_set_t), &cpuset) != 0)
-            std::cout << "Couldn't set thread affinity\n";
-    }
-
-    // Adds a tensor op to the map
-    void addTensorOp(std::string &op_name, TensorOp &top) {
-        // Create a vector if this is the first entry
-        auto it = tensor_info_.find(op_name);
-        if (it == tensor_info_.end()) {
-            tensor_info_.insert(std::pair<std::string, std::vector<TensorOp>>(op_name, std::vector<TensorOp>()));
-        }
-        tensor_info_[op_name].push_back(top);
-    }
-
-    // Obtains a single power reading from the GPU and DDR rails
-    void getPowerReading() {
-        PowerReading reading;
-
-        // The order matters here. All the reads have to happen together first
-        // and then all the seeks have to happen together at the end, otherwise
-        // there will be a significant time difference between the readings of
-        // the different rails.
-        reading.time_ = std::chrono::high_resolution_clock::now();
-        gpu_stream_ >> reading.gpu_;
-        ddr_stream_ >> reading.ddr_;
-        power_readings_.push_back(reading);
-
-        // Reset the input position of the files
-        gpu_stream_.seekg(0);
-        ddr_stream_.seekg(0);
-    }
-
-    // Executes the program to be profiled
-    void runProgram(const std::string& program) {
-        // Tell the profiling thread to start, execute the program that needs
-        // to be profiled, and then tell the profiling thread to stop.
-        start_ = true;
-        const auto result = std::system(program.c_str());
-        stop_ = true;
-    }
-
-    // Records power while the program is running
-    void recordPower() {
-        // Obtain the new start time, wait for the start signal, and keep
-        // profiling until the stop flag is set.
-        start_time_ = std::chrono::high_resolution_clock::now();
-        while (!start_);
-        while (!stop_)
-            getPowerReading();
-    }
-
-    // Calculates stats for the entire execution (CPU+GPU phase)
-    void updateTotalStats() {
-        double energy = 0.0;
-        double gpu_energy = 0.0;
-        double ddr_energy = 0.0;
-
-        std::chrono::time_point<std::chrono::high_resolution_clock> prev_time = start_time_;
-        for (const auto &reading : power_readings_) {
-            std::chrono::duration<double> duration = reading.time_ - prev_time;
-            gpu_energy += reading.gpu_ * duration.count();
-            ddr_energy += reading.ddr_ * duration.count();
-            prev_time = reading.time_;
-        }
-        energy = gpu_energy + ddr_energy;
-        auto time = std::chrono::duration<double>(prev_time - start_time_).count();
-
-        total_info_.time_ += time;
-        total_info_.energy_ += (gpu_energy + ddr_energy);
-        total_info_.gpu_energy_ += gpu_energy;
-        total_info_.ddr_energy_ += ddr_energy;
-
-        total_info_.power_ += (energy / time);
-        total_info_.gpu_power_ += (gpu_energy / time);
-        total_info_.ddr_power_ += (ddr_energy / time);
-    }
-
-    // Calculates energy and power usage of the given tensor operation
-    void calculateTensorEP(TensorOp &top) const {
-        auto prev_time = top.start_;
-        unsigned i = 0;
-
-        // Skip until we hit the start time of the operation
-        for (; i < power_readings_.size() && std::chrono::duration<double>(power_readings_[i].time_.time_since_epoch()).count() < top.start_; i++);
-
-        // Keep going until we hit the finish time of the operation or we run out of readings
-        for (double curr_time; (i < power_readings_.size())
-                && ((curr_time = std::chrono::duration<double>(power_readings_[i].time_.time_since_epoch()).count()) <= top.finish_); i++) {
-            auto duration = curr_time - prev_time;
-            prev_time = curr_time;
-
-            top.gpu_energy_ += power_readings_[i].gpu_ * duration;
-            top.ddr_energy_ += power_readings_[i].ddr_ * duration;
-        }
-        top.energy_ = top.gpu_energy_ + top.ddr_energy_;
-
-        top.power_ = top.energy_ / top.time_;
-        top.gpu_power_ = top.gpu_energy_ / top.time_;
-        top.ddr_power_ = top.ddr_energy_ / top.time_;
-    }
-
-    // Calculates stats for all the tensors in the timestamp file
-    void updatePerOpStats() {
-        const char * const op_file = "profile_data.txt";
-        std::string line;
-        std::ifstream ifs(op_file, std::ios::in);
-
-        // Calculate time and energy for each tensor operation. There are two
-        // possibilities for the file format:
-        // If the line doesn't begin with #, we are looking at FP32 code
-        // without any conversions to/from FP16, and each operation occupies
-        // two consecutive lines in the timestamp file.
-        // If the line does begin with #, we are looking at FP16 code with
-        // conversion routines in the middle. In this case, *after* the current
-        // line, there will be two lines for F2H, two lines for H2F, and then
-        // one line for the end of the operation.
-        while (std::getline(ifs, line)) {
-            std::vector<std::string> tokens;
-            boost::split(tokens, line, boost::is_any_of("\t"));
-            std::string op_name = tokens[0];
-
-            // FP32
-            if (tokens[0][0] != '#') {
-                // First line with tensor op name and start time
-                std::string op_name = tokens[0];
-                const auto start = std::stod(tokens[1]);
-
-                // Second line with tensor op end time
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto finish = std::stod(tokens[1]);
-
-                TensorOp top(op_name, start, finish);
-                calculateTensorEP(top);
-                addTensorOp(op_name, top);
-            } else {
-                // First line with tensor op name and start time
-                std::string op_name = tokens[0].substr(1);
-                const auto start = std::stod(tokens[1]);
-
-                // Second line with f2h
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                std::string f2h_name = op_name + "_f2h";
-                const auto f2h_start = std::stod(tokens[1]);
-
-                // Third line with f2h
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto f2h_finish = std::stod(tokens[1]);
-
-                // Add f2h
-                TensorOp f2h(f2h_name, f2h_start, f2h_finish);
-                calculateTensorEP(f2h);
-                addTensorOp(f2h_name, f2h);
-
-                // Fourth line with h2f
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                std::string h2f_name = op_name + "_h2f";
-                const auto h2f_start = std::stod(tokens[1]);
-
-                // Fifth line with h2f
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto h2f_finish = std::stod(tokens[1]);
-
-                // Add h2f
-                TensorOp h2f(h2f_name, h2f_start, h2f_finish);
-                calculateTensorEP(h2f);
-                addTensorOp(h2f_name, h2f);
-
-                // Sixth and final line with tensor op end time
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto finish = std::stod(tokens[1]);
-
-                // Subtract f2h's and h2f's time and energy to get just the computation's info
-                TensorOp top(op_name, start, finish);
-                calculateTensorEP(top);
-
-                top.time_ -= (f2h.time_ + h2f.time_);
-                top.energy_ -= (f2h.energy_ + h2f.energy_);
-                top.gpu_energy_ -= (f2h.gpu_energy_ + h2f.gpu_energy_);
-                top.ddr_energy_ -= (f2h.ddr_energy_ + h2f.ddr_energy_);
-                top.power_ = top.energy_ / top.time_;
-                top.gpu_power_ = top.gpu_energy_ / top.time_;
-                top.ddr_power_ = top.ddr_energy_ / top.time_;
-
-                addTensorOp(op_name, top);
-            }
-        }
-        ifs.close();
-    }
-
-    void updateStats() {
-        updatePerOpStats();
-        updateTotalStats();
-    }
-
-    // Calculates the average and standard deviation of each metric of each tensor op
-    void calculateAggregateStats() {
-        for (auto it = tensor_info_.begin(); it != tensor_info_.end(); it++) {
-            AggTensorInfo ati;
-            ati.name_ = it->first;
-            auto topv = it->second;
-
-            double total_time = 0.0;
-            double total_energy = 0.0;
-            double total_gpu_energy = 0.0;
-            double total_ddr_energy = 0.0;
-            double total_power = 0.0;
-            double total_gpu_power = 0.0;
-            double total_ddr_power = 0.0;
-
-            double time_sum = 0.0;
-            double energy_sum = 0.0;
-            double gpu_energy_sum = 0.0;
-            double ddr_energy_sum = 0.0;
-            double power_sum = 0.0;
-            double gpu_power_sum = 0.0;
-            double ddr_power_sum = 0.0;
-
-            // Calculate average
-            for (const auto &top : topv) {
-                total_time += top.time_;
-                total_energy += top.energy_;
-                total_gpu_energy += top.gpu_energy_;
-                total_ddr_energy += top.ddr_energy_;
-                total_power += top.power_;
-                total_gpu_power += top.gpu_power_;
-                total_ddr_power += top.ddr_power_;
-            }
-
-            ati.average_time_ = total_time / iterations_;
-            ati.average_energy_ = total_energy / iterations_;
-            ati.average_gpu_energy_ = total_gpu_energy / iterations_;
-            ati.average_ddr_energy_ = total_ddr_energy / iterations_;
-            ati.average_power_ = total_power / iterations_;
-            ati.average_gpu_power_ = total_gpu_power / iterations_;
-            ati.average_ddr_power_ = total_ddr_power / iterations_;
-
-            // Calculate standard deviation
-            for (const auto &top : topv) {
-                auto time_diff = top.time_ - ati.average_time_;
-                time_sum += time_diff * time_diff;
-
-                auto energy_diff = top.energy_ - ati.average_energy_;
-                energy_sum += energy_diff * energy_diff;
-                auto gpu_energy_diff = top.gpu_energy_ - ati.average_gpu_energy_;
-                gpu_energy_sum += gpu_energy_diff * gpu_energy_diff;
-                auto ddr_energy_diff = top.ddr_energy_ - ati.average_ddr_energy_;
-                ddr_energy_sum += ddr_energy_diff * ddr_energy_diff;
-
-                auto power_diff = top.power_ - ati.average_power_;
-                power_sum += power_diff * power_diff;
-                auto gpu_power_diff = top.gpu_power_ - ati.average_gpu_power_;
-                gpu_power_sum += gpu_power_diff * gpu_power_diff;
-                auto ddr_power_diff = top.ddr_power_ - ati.average_ddr_power_;
-                ddr_power_sum += ddr_power_diff * ddr_power_diff;
-            }
-
-            ati.time_std_ = std::sqrt(time_sum / iterations_);
-            ati.energy_std_ = std::sqrt(energy_sum / iterations_);
-            ati.gpu_energy_std_ = std::sqrt(gpu_energy_sum / iterations_);
-            ati.ddr_energy_std_ = std::sqrt(ddr_energy_sum / iterations_);
-            ati.power_std_ = std::sqrt(power_sum / iterations_);
-            ati.gpu_power_std_ = std::sqrt(gpu_power_sum / iterations_);
-            ati.ddr_power_std_ = std::sqrt(ddr_power_sum / iterations_);
-
-            agg_tensor_info_.push_back(ati);
-        }
-    }
-
-public:
-    Profiler() {
-        cpu_stream_.open(cpu_power_rail, std::ifstream::in);
-        gpu_stream_.open(gpu_power_rail, std::ifstream::in);
-        ddr_stream_.open(ddr_power_rail, std::ifstream::in);
-        soc_stream_.open(soc_power_rail, std::ifstream::in);
-        sys_stream_.open(sys_power_rail, std::ifstream::in);
-
-        if (!cpu_stream_.is_open() or !gpu_stream_.is_open() or !ddr_stream_.is_open()
-            or !soc_stream_.is_open() or !sys_stream_.is_open()) {
-            std::cout << "Failed to open one of the power rails for reading\n";
-            exit(1);
-        }
-    }
-
-    ~Profiler() {
-        cpu_stream_.close();
-        gpu_stream_.close();
-        ddr_stream_.close();
-        soc_stream_.close();
-        sys_stream_.close();
-    }
-
-    void profile(const std::string& program, const int iterations) {
-        iterations_ = iterations;
-        resetGlobal();
-
-        for (unsigned i = 0; i < iterations_; i++) {
-            resetLocal();
-
-            // Launch two threads: one for running the program and one for
-            // profiling it. Pin the threads to specific cores to remove migration
-            // overhead. Profiling showed that the sampling rate increases slightly
-            // with pinning.
-            std::thread prog(&Profiler::runProgram, this, program);
-            std::thread power(&Profiler::recordPower, this);
-            pinThread(prog, core1);
-            pinThread(power, core2);
-            prog.join();
-            power.join();
-
-            updateStats();
-
-            // Sleep for some time to bring the GPU back to idle
-            std::this_thread::sleep_for(std::chrono::milliseconds(gpu_idle_time));
-        }
-
-        calculateAggregateStats();
-    }
-
-    void dumpTensorInfo(const char * const filename) const {
-        std::cout<<"dumping to"<<filename<<'\n';
-        const std::string header = "Op,Time (ms),Energy (mJ),GPU Energy (mJ),DDR Energy (mJ),Power (mW),GPU Power (mW),DDR Power (mW),Time std,Energy std,GPU Energy std,DDR Energy std,Power std,GPU Power std,DDR Power std\n";
-        std::ofstream ofs;
-        ofs.open(filename);
-        //ofs << header;
-        for (const auto &ati : agg_tensor_info_) {
-            ofs << ati.name_
-                << "," << ati.average_time_ * 1e3
-                << "," << ati.average_energy_
-                /*
-                << "," << ati.average_gpu_energy_
-                << "," << ati.average_ddr_energy_
-                << "," << ati.average_power_
-                << "," << ati.average_gpu_power_
-                << "," << ati.average_ddr_power_
-                << "," << ati.time_std_ * 1e3
-                << "," << ati.energy_std_
-                << "," << ati.gpu_energy_std_
-                << "," << ati.ddr_energy_std_
-                << "," << ati.power_std_
-                << "," << ati.gpu_power_std_
-                << "," << ati.ddr_power_std_*/
-                << "\n";
-
-            std::cout << ati.average_time_ * 1e3 << "," << ati.average_energy_ << "\n";
-        }
-        ofs.close();
-    }
-
-    void dumpPowerReadings(const char * const filename) const {
-        std::ofstream ofs;
-        ofs.open(filename);
-        for (const auto &reading : power_readings_) {
-            std::chrono::duration<double> duration = reading.time_ - start_time_;
-            //std::chrono::duration<double> duration = reading.time_.time_since_epoch();
-            ofs << std::to_string(duration.count())
-                << " " << reading.gpu_
-                << " " << reading.ddr_
-                << "\n";
-        }
-        ofs.close();
-    }
-
-    void dumpTotalInfo() const {
-        auto total_time = total_info_.time_ / iterations_;
-
-        auto total_energy = total_info_.energy_ / iterations_;
-        auto gpu_energy = total_info_.gpu_energy_ / iterations_;
-        auto ddr_energy = total_info_.ddr_energy_ / iterations_;
-
-        auto power = total_info_.power_ / iterations_;
-        auto gpu_power = total_info_.gpu_power_ / iterations_;
-        auto ddr_power = total_info_.ddr_power_ / iterations_;
-
-        std::cout << "-----------------------------------------------------\n";
-        std::cout << "Program info (average)\n";
-        std::cout << "-----------------------------------------------------\n";
-        std::cout << "\tExecution time: " << total_time << " seconds\n";
-        std::cout << "\tTotal energy:   " << total_energy << " mJ\n";
-        std::cout << "\t    GPU:        " << gpu_energy << " mJ\n";
-        std::cout << "\t    DDR:        " << ddr_energy << " mJ\n";
-        std::cout << "\tPower:          " << power << " mW\n";
-        std::cout << "\t    GPU:        " << gpu_power << " mW\n";
-        std::cout << "\t    DDR:        " << ddr_power << " mW\n";
-        std::cout << "-----------------------------------------------------\n";
-    }
-};
-
-int main(int argc, char *argv[]) {
-    if (argc < NUM_ARGS) {
-        std::cout << "Usage: " << argv[0] << " <program> <params> END_PARAM <iterations> <tensor output file> [power output file]\n";
-        exit(1);
-    }
-
-    std::string program(argv[1]);
-    size_t i = 2;
-    for (; i < static_cast<size_t>(argc); i++) {
-        if (std::string(argv[i]) == "END_PARAM") {
-            break;
-        }
-        program += " " + std::string(argv[i]);
-    }
-    i += 1;
-
-    Profiler pp;
-    pp.profile(program, std::stoi(argv[i]));
-    pp.dumpTensorInfo(argv[i + 1]);
-
-    if (static_cast<size_t>(argc) > i + 2)
-        pp.dumpPowerReadings(argv[i + 2]);
-
-    return 0;
-}
-
diff --git a/llvm/projects/gpu_profiler/plot.sh b/llvm/projects/gpu_profiler/plot.sh
deleted file mode 100755
index 8e4573b10c2fab993b4998d2040d10b0f7e9f9c5..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/plot.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-input=$1
-gnuplot -p << EOF
-    #set terminal png
-    #set output "$input.png"
-    set xlabel "Time (s)"
-    set ylabel "Power (mW)"
-    set title "Power usage of GPU and DDR over time"
-    plot "$input" using 1:2 title 'GPU' with lines,"$input" using 1:3 title 'DDR' with lines
-EOF
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-1.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-1.pdf
deleted file mode 100644
index af57723b4091da6feffa9ef8f789698837b90bfa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-2.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-2.pdf
deleted file mode 100644
index 57c5597c28e1028fa643bc5b03db8fc51d0f4b6b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-3.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-3.pdf
deleted file mode 100644
index c7e0e3b2e7ff9d52c66b208321ecfa858ef5d9da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-1.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-1.pdf
deleted file mode 100644
index 85bba9ee4c6dea2b1a7356d3847acb9aa5ea85aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-2.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-2.pdf
deleted file mode 100644
index a7ddb64b1e9e97f8ba93c52b38402dd2293725ef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-3.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-3.pdf
deleted file mode 100644
index 5865ac69d6c5187fac0476f87d20e3a5154d516f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3fullCLK-ddr-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-1.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-1.pdf
deleted file mode 100644
index f235128927672fdb46c54dd357cafb3c275a7144..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-2.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-2.pdf
deleted file mode 100644
index e80e4cde621b99ef074e1c97aadd191d6b9777c1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-3.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-3.pdf
deleted file mode 100644
index 61f5d13e1cd040dbf7bdef058f34387f83a7df23..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-1.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-1.pdf
deleted file mode 100644
index fbd3ebd141c6eb0582496c85dcd5388a6a0bce7b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-2.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-2.pdf
deleted file mode 100644
index f19c5a204ccc418c5af80c8953b9c28f39c3fd93..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-3.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-3.pdf
deleted file mode 100644
index 4ae009ab08d8368139c09a4f51e5cadc3623fefb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-ddr-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-gpu-ddr.pdf b/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-gpu-ddr.pdf
deleted file mode 100644
index 5996b934a55fd90d9cd773d42e9cfa89429cab68..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/lenet/lenet-m3normal-gpu-ddr.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/merge1.pdf b/llvm/projects/gpu_profiler/results/tests/merge1.pdf
deleted file mode 100644
index 5d3c3540c2e67aa7943f5993cfebf0cca40412fe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/merge1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/merge3.pdf b/llvm/projects/gpu_profiler/results/tests/merge3.pdf
deleted file mode 100644
index c38e37e2d610175311fb3221fd5fa26892fdf1e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/merge3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-1.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-1.pdf
deleted file mode 100644
index 61bdcc890e0b5800ee4b3a8e19abdc724461a01b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-2.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-2.pdf
deleted file mode 100644
index bf80ebf1859640bdec386ef470fc04ada74ea822..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-3.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-3.pdf
deleted file mode 100644
index 557bc295b6386ea73a0fcbd140de63a326f6aecd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-tool.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-tool.pdf
deleted file mode 100644
index d00032f35f77934432832cef4c00124327a14169..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu+ddr-tool.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-1.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu-1.pdf
deleted file mode 100644
index 030d102341f58c57753d436554278c8d628137a8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-1.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-2.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu-2.pdf
deleted file mode 100644
index e6f97075597d8080bf223a1fd1a35b8969b9b141..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-2.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-3.pdf b/llvm/projects/gpu_profiler/results/tests/mm300-gpu-3.pdf
deleted file mode 100644
index 6898f235ef85bd5148f5fcfdcf50b4f14ca2ab19..0000000000000000000000000000000000000000
Binary files a/llvm/projects/gpu_profiler/results/tests/mm300-gpu-3.pdf and /dev/null differ
diff --git a/llvm/projects/gpu_profiler/run.pl b/llvm/projects/gpu_profiler/run.pl
deleted file mode 100755
index 8674e63d9453fbb1e07371d99cf22c4745f234b3..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/run.pl
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my $time;
-my $iterations = 100;
-
-# FP32
-print "############### FP32 ##############\n";
-
-print "Running Lenet\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./lenet_tanh $iterations lenet-fp32.csv`;
-
-print "Running FC2\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc2_clipped $iterations fc2-fp32.csv`;
-
-print "Running FC3\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc3_clipped $iterations fc3-fp32.csv`;
-
-print "Running FC4\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc4_clipped $iterations fc4-fp32.csv`;
-
-print "Running CIFAR\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./cifar_keras $iterations cifar-fp32.csv`;
-
-# FP16
-print "############### FP16 ##############\n";
-
-print "Running Lenet\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./lenet_tanh_half $iterations lenet-fp16.csv`;
-
-print "Running FC2\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc2_half $iterations fc2-fp16.csv`;
-
-print "Running FC3\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc3_half $iterations fc3-fp16.csv`;
-
-print "Running FC4\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./fc4_half $iterations fc4-fp16.csv`;
-
-print "Running CIFAR\n";
-$time = `date`;
-print $time;
-`~/awesome_profiler/pp ./cifar_keras_half $iterations cifar-fp16.csv`;
-
diff --git a/llvm/projects/gpu_profiler/run_dnns.pl b/llvm/projects/gpu_profiler/run_dnns.pl
deleted file mode 100755
index 041f3e3cae8598d34ac8d38f65cd37d51e8aa0ba..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/run_dnns.pl
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my $time;
-my $iterations = 100;
-my @networks = ("alexnet", "alexnet2", "resnet18", "vgg16");
-
-# FP32
-print "############### FP32 ##############\n";
-
-foreach my $network (@networks) {
-    print "Running $network\n";
-    $time = `date`;
-    print $time;
-    `~/awesome_profiler/pp ./${network}_cifar10 $iterations ${network}_fp32.csv`;
-}
-
-# FP16
-print "############### FP16 ##############\n";
-
-foreach my $network (@networks) {
-    print "Running $network\n";
-    $time = `date`;
-    print $time;
-    `~/awesome_profiler/pp ./${network}_cifar10_half $iterations ${network}_fp16.csv`;
-}
-
diff --git a/llvm/projects/gpu_profiler/run_image_pipelines.pl b/llvm/projects/gpu_profiler/run_image_pipelines.pl
deleted file mode 100755
index 8e6df67d2e96d343cff3cc6a324693c14abaa3f3..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/run_image_pipelines.pl
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my $time;
-my $iterations = 100;
-my @pipelines = ("pipeline_GEMO", "pipeline_GEO", "pipeline_GEOM", "pipeline_GSM", "pipeline_GSME");
-
-# FP32
-print "############### FP32 ##############\n";
-
-foreach my $pipeline (@pipelines) {
-    print "Running $pipeline\n";
-    $time = `date`;
-    print $time;
-    `~/awesome_profiler/pp ./${pipeline} $iterations ${pipeline}_fp32.csv`;
-}
-
-# FP16
-print "############### FP16 ##############\n";
-
-foreach my $pipeline (@pipelines) {
-    print "Running $pipeline\n";
-    $time = `date`;
-    print $time;
-    `~/awesome_profiler/pp ./${pipeline}_half $iterations ${pipeline}_fp16.csv`;
-}
-
diff --git a/llvm/projects/gpu_profiler/src/offline_profiler.cpp b/llvm/projects/gpu_profiler/src/offline_profiler.cpp
deleted file mode 100644
index 25ca45241c29e7a0f8edb0518d8347a185caf5a4..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/src/offline_profiler.cpp
+++ /dev/null
@@ -1,584 +0,0 @@
-#include <cmath>
-#include <chrono>
-
-#include <iostream>
-#include <fstream>
-#include <string>
-#include <boost/algorithm/string.hpp>
-
-#include <vector>
-#include <map>
-
-#include <thread>
-#include <atomic>
-#include <sched.h>
-
-#define NUM_ARGS 4
-
-// This is a simple power profiler that can sample the power of the various
-// components in a Jetson TX2. The usage is simple: profile() measures power
-// for the specified program, and then dumpTensorInfo() and dumpPowerReadings()
-// write the results to files. profile() can be called as many times as desired;
-// the internal state is reset each time, so the measurements are not cumulative.
-class Profiler {
-private:
-    // Jetson's ARM cores' physical IDs. The two Denver cores are 1 and 2, and
-    // we can't use them.
-    const unsigned core0 = 0;
-    const unsigned core1 = 3;
-    const unsigned core2 = 4;
-    const unsigned core3 = 5;
-
-    // sysfs paths for i2c buses of various components
-    const char * const cpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power1_input";
-    const char * const gpu_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power0_input";
-    const char * const ddr_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power2_input";
-    const char * const soc_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0040/iio_device/in_power1_input";
-    const char * const sys_power_rail = "/sys/devices/3160000.i2c/i2c-0/0-0041/iio_device/in_power0_input";
-
-    // It takes some time for the GPU's power to return to idle (ms)
-    const unsigned gpu_idle_time = 0;
-
-    // An individual power reading
-    struct PowerReading {
-        std::chrono::time_point<std::chrono::high_resolution_clock> time_;
-        double cpu_;
-        double gpu_;
-        double ddr_;
-        double soc_;
-        double sys_;
-    };
-
-    // Individual tensor op
-    struct TensorOp {
-        std::string name_;
-
-        double start_;
-        double finish_;
-        double time_;
-
-        double energy_;
-        double gpu_energy_;
-        double ddr_energy_;
-
-        double power_;
-        double gpu_power_;
-        double ddr_power_;
-
-        TensorOp(std::string name, double start, double finish)
-            : name_(name), start_(start), finish_(finish), time_(finish - start),
-            energy_(0.0), gpu_energy_(0.0), ddr_energy_(0.0),
-            power_(0.0), gpu_power_(0.0), ddr_power_(0.0) {
-        }
-    };
-
-    // Aggregate tensor info
-    struct AggTensorInfo {
-        // Op name
-        std::string name_;
-
-        // Averages
-        double average_time_;
-
-        double average_energy_;
-        double average_gpu_energy_;
-        double average_ddr_energy_;
-
-        double average_power_;
-        double average_gpu_power_;
-        double average_ddr_power_;
-
-        // Standard deviations
-        double time_std_;
-
-        double energy_std_;
-        double gpu_energy_std_;
-        double ddr_energy_std_;
-
-        double power_std_;
-        double gpu_power_std_;
-        double ddr_power_std_;
-    };
-
-    // Total time, energy, and power
-    struct TotalInfo {
-        double time_;
-
-        double energy_;
-        double gpu_energy_;
-        double ddr_energy_;
-
-        double power_;
-        double gpu_power_;
-        double ddr_power_;
-
-        void clear() {
-            time_ = 0.0;
-
-            energy_ = 0.0;
-            gpu_energy_ = 0.0;
-            ddr_energy_ = 0.0;
-
-            power_ = 0.0;
-            gpu_power_ = 0.0;
-            ddr_power_ = 0.0;
-        }
-    };
-
-    // For reading the i2c buses via sysfs
-    std::ifstream cpu_stream_;
-    std::ifstream gpu_stream_;
-    std::ifstream ddr_stream_;
-    std::ifstream soc_stream_;
-    std::ifstream sys_stream_;
-
-    // Start time (so graph begins from t=0)
-    std::chrono::time_point<std::chrono::high_resolution_clock> start_time_;
-
-    // Per-run info
-    std::vector<PowerReading> power_readings_;
-
-    // Aggregate (across all runs) info
-    std::map<std::string, std::vector<TensorOp>> tensor_info_;
-    std::vector<AggTensorInfo> agg_tensor_info_;
-    TotalInfo total_info_;
-    unsigned iterations_;
-
-    // Start and stop flags to synchronize the program and profiling threads
-    std::atomic_bool start_;
-    std::atomic_bool stop_;
-
-private:
-    // Resets tensor info and total time and energy
-    void resetGlobal() {
-        tensor_info_.clear();
-        agg_tensor_info_.clear();
-        total_info_.clear();
-    }
-
-    // Resets power readings and flags
-    void resetLocal() {
-        power_readings_.clear();
-        start_ = false;
-        stop_ = false;
-    }
-
-    // Pins the given thread to the specified core
-    void pinThread(std::thread &t, const unsigned core) const {
-        cpu_set_t cpuset;
-        CPU_ZERO(&cpuset);
-        CPU_SET(core, &cpuset);
-        if (pthread_setaffinity_np(t.native_handle(), sizeof(cpu_set_t), &cpuset) != 0)
-            std::cout << "Couldn't set thread affinity\n";
-    }
-
-    // Adds a tensor op to the map
-    void addTensorOp(std::string &op_name, TensorOp &top) {
-        // Create a vector if this is the first entry
-        auto it = tensor_info_.find(op_name);
-        if (it == tensor_info_.end()) {
-            tensor_info_.insert(std::pair<std::string, std::vector<TensorOp>>(op_name, std::vector<TensorOp>()));
-        }
-        tensor_info_[op_name].push_back(top);
-    }
-
-    // Obtains a single power reading from the GPU and DDR rails
-    void getPowerReading() {
-        PowerReading reading;
-
-        // The order matters here. All the reads have to happen together first
-        // and then all the seeks have to happen together at the end, otherwise
-        // there will be a significant time difference between the readings of
-        // the different rails.
-        reading.time_ = std::chrono::high_resolution_clock::now();
-        gpu_stream_ >> reading.gpu_;
-        ddr_stream_ >> reading.ddr_;
-        power_readings_.push_back(reading);
-
-        // Reset the input position of the files
-        gpu_stream_.seekg(0);
-        ddr_stream_.seekg(0);
-    }
-
-    // Executes the program to be profiled
-    void runProgram(const char * const program) {
-        // Tell the profiling thread to start, execute the program that needs
-        // to be profiled, and then tell the profiling thread to stop.
-        start_ = true;
-        const auto result = std::system(program);
-        stop_ = true;
-    }
-
-    // Records power while the program is running
-    void recordPower() {
-        // Obtain the new start time, wait for the start signal, and keep
-        // profiling until the stop flag is set.
-        start_time_ = std::chrono::high_resolution_clock::now();
-        while (!start_);
-        while (!stop_)
-            getPowerReading();
-    }
-
-    // Calculates stats for the entire execution (CPU+GPU phase)
-    void updateTotalStats() {
-        double energy = 0.0;
-        double gpu_energy = 0.0;
-        double ddr_energy = 0.0;
-
-        std::chrono::time_point<std::chrono::high_resolution_clock> prev_time = start_time_;
-        for (const auto &reading : power_readings_) {
-            std::chrono::duration<double> duration = reading.time_ - prev_time;
-            gpu_energy += reading.gpu_ * duration.count();
-            ddr_energy += reading.ddr_ * duration.count();
-            prev_time = reading.time_;
-        }
-        energy = gpu_energy + ddr_energy;
-        auto time = std::chrono::duration<double>(prev_time - start_time_).count();
-
-        total_info_.time_ += time;
-        total_info_.energy_ += (gpu_energy + ddr_energy);
-        total_info_.gpu_energy_ += gpu_energy;
-        total_info_.ddr_energy_ += ddr_energy;
-
-        total_info_.power_ += (energy / time);
-        total_info_.gpu_power_ += (gpu_energy / time);
-        total_info_.ddr_power_ += (ddr_energy / time);
-    }
-
-    // Calculates energy and power usage of the given tensor operation
-    void calculateTensorEP(TensorOp &top) const {
-        auto prev_time = top.start_;
-        unsigned i = 0;
-
-        // Skip until we hit the start time of the operation
-        for (; i < power_readings_.size() && std::chrono::duration<double>(power_readings_[i].time_.time_since_epoch()).count() < top.start_; i++);
-
-        // Keep going until we hit the finish time of the operation or we run out of readings
-        for (double curr_time; (i < power_readings_.size())
-                && ((curr_time = std::chrono::duration<double>(power_readings_[i].time_.time_since_epoch()).count()) <= top.finish_); i++) {
-            auto duration = curr_time - prev_time;
-            prev_time = curr_time;
-
-            top.gpu_energy_ += power_readings_[i].gpu_ * duration;
-            top.ddr_energy_ += power_readings_[i].ddr_ * duration;
-        }
-        top.energy_ = top.gpu_energy_ + top.ddr_energy_;
-
-        top.power_ = top.energy_ / top.time_;
-        top.gpu_power_ = top.gpu_energy_ / top.time_;
-        top.ddr_power_ = top.ddr_energy_ / top.time_;
-    }
-
-    // Calculates stats for all the tensors in the timestamp file
-    void updatePerOpStats() {
-        const char * const op_file = "profile_data.txt";
-        std::string line;
-        std::ifstream ifs(op_file, std::ios::in);
-
-        // Calculate time and energy for each tensor operation. There are two
-        // possibilities for the file format:
-        // If the line doesn't begin with #, we are looking at FP32 code
-        // without any conversions to/from FP16, and each operation occupies
-        // two consecutive lines in the timestamp file.
-        // If the line does begin with #, we are looking at FP16 code with
-        // conversion routines in the middle. In this case, *after* the current
-        // line, there will be two lines for F2H, two lines for H2F, and then
-        // one line for the end of the operation.
-        while (std::getline(ifs, line)) {
-            std::vector<std::string> tokens;
-            boost::split(tokens, line, boost::is_any_of("\t"));
-            std::string op_name = tokens[0];
-
-            // FP32
-            if (tokens[0][0] != '#') {
-                // First line with tensor op name and start time
-                std::string op_name = tokens[0];
-                const auto start = std::stod(tokens[1]);
-
-                // Second line with tensor op end time
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto finish = std::stod(tokens[1]);
-
-                TensorOp top(op_name, start, finish);
-                calculateTensorEP(top);
-                addTensorOp(op_name, top);
-            } else {
-                // First line with tensor op name and start time
-                std::string op_name = tokens[0].substr(1);
-                const auto start = std::stod(tokens[1]);
-
-                // Second line with f2h
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                std::string f2h_name = op_name + "_f2h";
-                const auto f2h_start = std::stod(tokens[1]);
-
-                // Third line with f2h
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto f2h_finish = std::stod(tokens[1]);
-
-                // Add f2h
-                TensorOp f2h(f2h_name, f2h_start, f2h_finish);
-                calculateTensorEP(f2h);
-                addTensorOp(f2h_name, f2h);
-
-                // Fourth line with h2f
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                std::string h2f_name = op_name + "_h2f";
-                const auto h2f_start = std::stod(tokens[1]);
-
-                // Fifth line with h2f
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto h2f_finish = std::stod(tokens[1]);
-
-                // Add h2f
-                TensorOp h2f(h2f_name, h2f_start, h2f_finish);
-                calculateTensorEP(h2f);
-                addTensorOp(h2f_name, h2f);
-
-                // Sixth and final line with tensor op end time
-                std::getline(ifs, line);
-                tokens.clear();
-                boost::split(tokens, line, boost::is_any_of("\t"));
-                const auto finish = std::stod(tokens[1]);
-
-                // Subtract f2h's and h2f's time and energy to get just the computation's info
-                TensorOp top(op_name, start, finish);
-                calculateTensorEP(top);
-
-                top.time_ -= (f2h.time_ + h2f.time_);
-                top.energy_ -= (f2h.energy_ + h2f.energy_);
-                top.gpu_energy_ -= (f2h.gpu_energy_ + h2f.gpu_energy_);
-                top.ddr_energy_ -= (f2h.ddr_energy_ + h2f.ddr_energy_);
-                top.power_ = top.energy_ / top.time_;
-                top.gpu_power_ = top.gpu_energy_ / top.time_;
-                top.ddr_power_ = top.ddr_energy_ / top.time_;
-
-                addTensorOp(op_name, top);
-            }
-        }
-        ifs.close();
-    }
-
-    void updateStats() {
-        updatePerOpStats();
-        updateTotalStats();
-    }
-
-    // Calculates the average and standard deviation of each metric of each tensor op
-    void calculateAggregateStats() {
-        for (auto it = tensor_info_.begin(); it != tensor_info_.end(); it++) {
-            AggTensorInfo ati;
-            ati.name_ = it->first;
-            auto topv = it->second;
-
-            double total_time = 0.0;
-            double total_energy = 0.0;
-            double total_gpu_energy = 0.0;
-            double total_ddr_energy = 0.0;
-            double total_power = 0.0;
-            double total_gpu_power = 0.0;
-            double total_ddr_power = 0.0;
-
-            double time_sum = 0.0;
-            double energy_sum = 0.0;
-            double gpu_energy_sum = 0.0;
-            double ddr_energy_sum = 0.0;
-            double power_sum = 0.0;
-            double gpu_power_sum = 0.0;
-            double ddr_power_sum = 0.0;
-
-            // Calculate average
-            for (const auto &top : topv) {
-                total_time += top.time_;
-                total_energy += top.energy_;
-                total_gpu_energy += top.gpu_energy_;
-                total_ddr_energy += top.ddr_energy_;
-                total_power += top.power_;
-                total_gpu_power += top.gpu_power_;
-                total_ddr_power += top.ddr_power_;
-            }
-
-            ati.average_time_ = total_time / iterations_;
-            ati.average_energy_ = total_energy / iterations_;
-            ati.average_gpu_energy_ = total_gpu_energy / iterations_;
-            ati.average_ddr_energy_ = total_ddr_energy / iterations_;
-            ati.average_power_ = total_power / iterations_;
-            ati.average_gpu_power_ = total_gpu_power / iterations_;
-            ati.average_ddr_power_ = total_ddr_power / iterations_;
-
-            // Calculate standard deviation
-            for (const auto &top : topv) {
-                auto time_diff = top.time_ - ati.average_time_;
-                time_sum += time_diff * time_diff;
-
-                auto energy_diff = top.energy_ - ati.average_energy_;
-                energy_sum += energy_diff * energy_diff;
-                auto gpu_energy_diff = top.gpu_energy_ - ati.average_gpu_energy_;
-                gpu_energy_sum += gpu_energy_diff * gpu_energy_diff;
-                auto ddr_energy_diff = top.ddr_energy_ - ati.average_ddr_energy_;
-                ddr_energy_sum += ddr_energy_diff * ddr_energy_diff;
-
-                auto power_diff = top.power_ - ati.average_power_;
-                power_sum += power_diff * power_diff;
-                auto gpu_power_diff = top.gpu_power_ - ati.average_gpu_power_;
-                gpu_power_sum += gpu_power_diff * gpu_power_diff;
-                auto ddr_power_diff = top.ddr_power_ - ati.average_ddr_power_;
-                ddr_power_sum += ddr_power_diff * ddr_power_diff;
-            }
-
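-            // Note: population standard deviation (divides by iterations_,
-            // one sample per profiled iteration)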
-            ati.time_std_ = std::sqrt(time_sum / iterations_);
-            ati.energy_std_ = std::sqrt(energy_sum / iterations_);
-            ati.gpu_energy_std_ = std::sqrt(gpu_energy_sum / iterations_);
-            ati.ddr_energy_std_ = std::sqrt(ddr_energy_sum / iterations_);
-            ati.power_std_ = std::sqrt(power_sum / iterations_);
-            ati.gpu_power_std_ = std::sqrt(gpu_power_sum / iterations_);
-            ati.ddr_power_std_ = std::sqrt(ddr_power_sum / iterations_);
-
-            agg_tensor_info_.push_back(ati);
-        }
-    }
-
-public:
-    Profiler() {
-        cpu_stream_.open(cpu_power_rail, std::ifstream::in);
-        gpu_stream_.open(gpu_power_rail, std::ifstream::in);
-        ddr_stream_.open(ddr_power_rail, std::ifstream::in);
-        soc_stream_.open(soc_power_rail, std::ifstream::in);
-        sys_stream_.open(sys_power_rail, std::ifstream::in);
-
-        if (!cpu_stream_.is_open() || !gpu_stream_.is_open() || !ddr_stream_.is_open()
-            || !soc_stream_.is_open() || !sys_stream_.is_open()) {
-            std::cout << "Failed to open one of the power rails for reading\n";
-            exit(1);
-        }
-    }
-
-    ~Profiler() {
-        cpu_stream_.close();
-        gpu_stream_.close();
-        ddr_stream_.close();
-        soc_stream_.close();
-        sys_stream_.close();
-    }
-
-    void profile(const char * const program, const int iterations) {
-        iterations_ = iterations;
-        resetGlobal();
-
-        for (unsigned i = 0; i < iterations_; i++) {
-            resetLocal();
-
-            // Launch two threads: one for running the program and one for
-            // profiling it. Pin the threads to specific cores to remove migration
-            // overhead. Profiling showed that the sampling rate increases slightly
-            // with pinning.
-            std::thread prog(&Profiler::runProgram, this, program);
-            std::thread power(&Profiler::recordPower, this);
-            pinThread(prog, core1);
-            pinThread(power, core2);
-            prog.join();
-            power.join();
-
-            updateStats();
-
-            // Sleep for some time to bring the GPU back to idle
-            std::this_thread::sleep_for(std::chrono::milliseconds(gpu_idle_time));
-        }
-
-        calculateAggregateStats();
-    }
-
-    void dumpTensorInfo(const char * const filename) const {
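-        // The header row and the extra per-metric columns below are kept but
-        // commented out; only <op name, time (ms), energy (mJ)> is emitted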
-        const std::string header = "Op,Time (ms),Energy (mJ),GPU Energy (mJ),DDR Energy (mJ),Power (mW),GPU Power (mW),DDR Power (mW),Time std,Energy std,GPU Energy std,DDR Energy std,Power std,GPU Power std,DDR Power std\n";
-        std::ofstream ofs;
-        ofs.open(filename);
-        //ofs << header;
-        for (const auto &ati : agg_tensor_info_) {
-            ofs << ati.name_
-                << "," << ati.average_time_ * 1e3
-                << "," << ati.average_energy_
-                /*
-                << "," << ati.average_gpu_energy_
-                << "," << ati.average_ddr_energy_
-                << "," << ati.average_power_
-                << "," << ati.average_gpu_power_
-                << "," << ati.average_ddr_power_
-                << "," << ati.time_std_ * 1e3
-                << "," << ati.energy_std_
-                << "," << ati.gpu_energy_std_
-                << "," << ati.ddr_energy_std_
-                << "," << ati.power_std_
-                << "," << ati.gpu_power_std_
-                << "," << ati.ddr_power_std_*/
-                << "\n";
-
-            std::cout << ati.average_time_ * 1e3 << "," << ati.average_energy_ << "\n";
-        }
-        ofs.close();
-    }
-
-    void dumpPowerReadings(const char * const filename) const {
-        std::ofstream ofs;
-        ofs.open(filename);
-        for (const auto &reading : power_readings_) {
-            std::chrono::duration<double> duration = reading.time_ - start_time_;
-            //std::chrono::duration<double> duration = reading.time_.time_since_epoch();
-            ofs << std::to_string(duration.count())
-                << " " << reading.gpu_
-                << " " << reading.ddr_
-                << "\n";
-        }
-        ofs.close();
-    }
-
-    void dumpTotalInfo() const {
-        auto total_time = total_info_.time_ / iterations_;
-
-        auto total_energy = total_info_.energy_ / iterations_;
-        auto gpu_energy = total_info_.gpu_energy_ / iterations_;
-        auto ddr_energy = total_info_.ddr_energy_ / iterations_;
-
-        auto power = total_info_.power_ / iterations_;
-        auto gpu_power = total_info_.gpu_power_ / iterations_;
-        auto ddr_power = total_info_.ddr_power_ / iterations_;
-
-        std::cout << "-----------------------------------------------------\n";
-        std::cout << "Program info (average)\n";
-        std::cout << "-----------------------------------------------------\n";
-        std::cout << "\tExecution time: " << total_time << " seconds\n";
-        std::cout << "\tTotal energy:   " << total_energy << " mJ\n";
-        std::cout << "\t    GPU:        " << gpu_energy << " mJ\n";
-        std::cout << "\t    DDR:        " << ddr_energy << " mJ\n";
-        std::cout << "\tPower:          " << power << " mW\n";
-        std::cout << "\t    GPU:        " << gpu_power << " mW\n";
-        std::cout << "\t    DDR:        " << ddr_power << " mW\n";
-        std::cout << "-----------------------------------------------------\n";
-    }
-};
-
-int main(int argc, char *argv[]) {
-    if (argc < NUM_ARGS) {
-        std::cout << "Usage: " << argv[0] << " <program> <iterations> <tensor output file> [power output file]\n";
-        exit(1);
-    }
-
-    Profiler pp;
-    pp.profile(argv[1], std::stoi(argv[2]));
-    pp.dumpTensorInfo(argv[3]);
-
-    if (argc > NUM_ARGS)
-        pp.dumpPowerReadings(argv[4]);
-
-    return 0;
-}
-
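-// Example invocation (binary and file names are illustrative):
-//   ./profiler ./lenet_keras 10 lenet_ops.csv lenet_power.txt
-// profiles ./lenet_keras for 10 iterations, writes per-op time/energy
-// averages to lenet_ops.csv, and dumps the raw GPU/DDR power samples to
-// lenet_power.txt.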
diff --git a/llvm/projects/gpu_profiler/src/profiler.cpp b/llvm/projects/gpu_profiler/src/profiler.cpp
deleted file mode 100644
index cae8823fa23056d44973f3b23d16c4c2f28e89f7..0000000000000000000000000000000000000000
--- a/llvm/projects/gpu_profiler/src/profiler.cpp
+++ /dev/null
@@ -1,200 +0,0 @@
-#include <cassert>
-#include "profiler.h" 
-
-Profiler::Profiler() : should_run_profiler_(false), should_stop_profiler_(false) {
-    // Open all streams. Not done in start_profiler() because the streams
-    // must be opened exactly once, while start_profiler() may run repeatedly
-    cpu_stream_.open(cpu_power_rail, std::ifstream::in);
-    gpu_stream_.open(gpu_power_rail, std::ifstream::in);
-    ddr_stream_.open(ddr_power_rail, std::ifstream::in);
-    soc_stream_.open(soc_power_rail, std::ifstream::in);
-    sys_stream_.open(sys_power_rail, std::ifstream::in);
-
-    // Check whether the Jetson chip-id file exists as an indirect test for
-    // running on the Jetson
-    std::ifstream jetson_file(jetson_chip_id);
-    on_jetson_ = jetson_file.good();
-    if (on_jetson_ && 
-                (!cpu_stream_.is_open() || !gpu_stream_.is_open() 
-                || !ddr_stream_.is_open() || !soc_stream_.is_open() 
-                || !sys_stream_.is_open())) {
-        std::cout << "Failed to open one of the power rails for reading\n";
-        exit(1);
-    }
-}
-
-Profiler::~Profiler() {
-    cpu_stream_.close();
-    gpu_stream_.close();
-    ddr_stream_.close();
-    soc_stream_.close();
-    sys_stream_.close();
-}
-
-// Reinitializes boolean vars used for control flow and launches the profiler 
-// thread. DOES NOT reset other internal data structures. 
-void Profiler::start_profiler(){
-    // Reinitialize in case the profiler object has been used before 
-    should_run_profiler_ = false;
-    should_stop_profiler_ = false;
-    profiler_thread_ = std::thread(&Profiler::run_profiler, this);
-    pin_thread(profiler_thread_, core1);
-}
-
-// Resumes profiling of whatever is currently running.
-// DOES NOT reset any previously collected data.
-void Profiler::resume_profiler() {
-    {
-        std::unique_lock<std::mutex> mutex_lock(mutex_);
-        if (should_run_profiler_){
-            std::cout << "WARNING: resume_profiler was already called\n";
-        }
-        should_run_profiler_ = true;
-        start_time_ = clock_type::now();
-    }
-    cond_var_.notify_one();
-}
-
-// Stops profiler by putting profiler thread to sleep 
-void Profiler::pause_profiler() {
-    {
-        std::unique_lock<std::mutex> mutex_lock(mutex_);
-        if (!should_run_profiler_){
-            std::cout << "WARNING: pause_profiler was already called\n";
-        }
-        should_run_profiler_ = false;
-        stop_time_ = clock_type::now();
-    }
-    cond_var_.notify_one();
-}
-
-// Gets the delta time and total GPU and DDR energy between the last two
-// calls to resume_profiler and pause_profiler
-//
-// Returns this as a pair of <delta time in milliseconds, energy>
-std::pair<double, double> Profiler::get_time_energy() const {
-    // power_readings_ is shared with the profiler thread, so the vector
-    // mutex MUST be held while iterating
-    std::unique_lock<std::mutex> mutex_lock(vector_mutex_);
-
-    double total_energy = 0.0;
-    if (on_jetson_) {
-        auto prev_time = start_time_;
-        for (const auto &reading : power_readings_) {
-            std::chrono::duration<double> duration_secs = reading.time_ - prev_time;
-            // Integrate power over each sampling interval to get energy
-            total_energy += (reading.gpu_ + reading.ddr_) * duration_secs.count();
-            prev_time = reading.time_;
-        }
-    }
-    std::chrono::duration<double, std::milli> duration_milli = stop_time_ - start_time_;
-    double delta_time = duration_milli.count();
-    return std::make_pair(delta_time, total_energy);
-}
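-// Units note: the rails report instantaneous power in mW and each sample is
-// weighted by its sampling interval in seconds, so the accumulated value is
-// in mJ (e.g., a combined 3000 mW reading held for 0.001 s contributes 3 mJ).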
-
-void Profiler::reset() {
-    std::unique_lock<std::mutex> bool_var_lock(mutex_); 
-    std::unique_lock<std::mutex> vector_lock(vector_mutex_); 
-    should_stop_profiler_ = false; // Can call reset after calling pause_profiler()
-    should_run_profiler_ = false; // Can call reset after calling resume 
-    power_readings_.clear();
-    run_profiler_overhead = 0.0;
-}
-
-// Exit the profiler and kill the thread
-// Must call start_profiler() to reuse this object after calling stop_profiler()
-void Profiler::stop_profiler() { 
-    std::cout << "Exiting profiler\n";
-    should_stop_profiler_ = true;
-    cond_var_.notify_one();
-    profiler_thread_.join();
-}
-
-// Obtains a single power reading from the GPU and DDR rails
-void Profiler::obtain_power_reading() {
-    // power_readings_ is also read by other threads, so the vector mutex
-    // MUST be held while appending
-    std::unique_lock<std::mutex> mutex_lock(vector_mutex_);
-
-    PowerReading reading;
-
-    // The order matters here. All the reads have to happen together first
-    // and then all the seeks have to happen together at the end, otherwise
-    // there will be a significant time difference between the readings of
-    // the different rails.
-    reading.time_ = clock_type::now(); 
-    if (on_jetson_){
-        gpu_stream_ >> reading.gpu_;
-        ddr_stream_ >> reading.ddr_;
-
-        gpu_stream_.seekg(0);
-        ddr_stream_.seekg(0);
-
-    } else {
-        reading.gpu_ = 0.0;
-        reading.ddr_ = 0.0;
-    }
-    power_readings_.push_back(reading);
-}
-
-// Pins the given thread to the specified core
-void Profiler::pin_thread(std::thread &t, const unsigned core) const {
-    cpu_set_t cpuset;
-    CPU_ZERO(&cpuset);
-    CPU_SET(core, &cpuset);
-    if (pthread_setaffinity_np(t.native_handle(), sizeof(cpu_set_t), &cpuset) != 0)
-        std::cout << "Couldn't set thread affinity\n";
-}
-
-// Runs the profiler thread, keeping it alive by wrapping the functionality
-// in an infinite loop 
-void Profiler::run_profiler(){
-    while (true){
-        if (should_stop_profiler_) {
-            break;
-        }
-        // Need to lock the mutex and check the condition var 
-        {
-            std::unique_lock<std::mutex> mutex_lock(mutex_);
-            if (should_stop_profiler_) {
-                break;
-            }
-            // Wake the thread up when it's time to run the profiler or exit
-            // the profiler 
-            cond_var_.wait(mutex_lock, [this]{return should_run_profiler_
-                        || should_stop_profiler_; });
-        }
-        if (should_stop_profiler_) {
-            break;
-        }
-        obtain_power_reading();
-    }
-}
-
-/*
-// TESTS
-void resume_pause_profiler(Profiler& profile_wrapper, unsigned long sleep_millis){
-    profile_wrapper.resume_profiler(); 
-    std::this_thread::sleep_for(std::chrono::milliseconds(sleep_millis));
-    profile_wrapper.pause_profiler();
-
-    auto time_energy_pair = profile_wrapper.get_time_energy();
-    profile_wrapper.reset();
-    //if (time_energy_pair.first > sleep_millis + 1 || time_energy_pair.first < sleep_millis - 1){
-        printf("WARNING: time: %f, energy: %f\n", time_energy_pair.first, time_energy_pair.second);
-    //}
-    std::this_thread::sleep_for(std::chrono::milliseconds(sleep_millis));
-    //std::cout<<"\n\n";
-}
-
-int main(){
-    Profiler profile_wrapper;
-    profile_wrapper.start_profiler();
-
-    unsigned long sleep_millis = 25;
-    for (size_t i = 0; i < 50; i++){
-        resume_pause_profiler(profile_wrapper, sleep_millis);
-    }
-    // IMPORTANT
-    profile_wrapper.stop_profiler();
-    return 0;
-}
-*/
diff --git a/llvm/projects/hpvm-tensor-rt/.gitignore b/llvm/projects/hpvm-tensor-rt/.gitignore
deleted file mode 100644
index 9581f1d5b06aaafaaae77ef6175bc243707e4685..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-./build
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/CMakeLists.txt b/llvm/projects/hpvm-tensor-rt/CMakeLists.txt
deleted file mode 100644
index 0eab861beac699407114d0cae4e063f41dbdab95..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/CMakeLists.txt
+++ /dev/null
@@ -1,259 +0,0 @@
-cmake_minimum_required (VERSION 2.6)
-project (cudnn-training)
-
-find_package(CUDA 6.5 REQUIRED)
-
-
-if (CMAKE_BUILD_TYPE STREQUAL "Debug")
-  message("Debug mode")
-  set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS};-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_60,code=compute_60;-std=c++11;-g;-lineinfo;-Xcompiler;-ggdb;-lcurand)
-else()
-  set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS};-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_60,code=compute_60;-std=c++11;-DNDEBUG;-Xcompiler;-DNDEBUG;-lcurand)
-endif()
-
-set(CUDA_PROPAGATE_HOST_FLAGS OFF)
-
-# Addresses a bug where non-CUDA code is not compiled as C++11 with older g++ versions
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
-
-add_definitions(-DNO_INJECTION)
-add_definitions(-DPROMISE_TUNER_ENABLED)
-if(USE_GFLAGS)
-  add_definitions(-DUSE_GFLAGS)
-endif()
-
-if(USE_AUTOTUNER)
-  remove_definitions(-DNO_INJECTION)
-endif()
-
- 
-
-include_directories($ENV{CUDNN_PATH} $ENV{CUDNN_PATH}/include)
-include_directories(./tensor_runtime/include)
-include_directories(../gpu_profiler/include)
-include_directories(../soc_simulator/include)
-link_directories($ENV{CUDNN_PATH} $ENV{CUDNN_PATH}/lib $ENV{CUDNN_PATH}/lib64)
-
-
-# Adding new rule for building a cuDNN runtime library
-cuda_add_library(tensor_runtime tensor_runtime/src/tensor_runtime.cu)
-cuda_add_cublas_to_target(tensor_runtime)
-
-# Adding new rule for building a cuDNN runtime library
-cuda_add_library(tensor_cpu_runtime tensor_runtime/src/tensor_cpu_runtime.cc)
-
-find_library(GPU_PROFILER_LIB
-    NAMES libgpu_profiler.a
-    HINTS ../gpu_profiler/lib
-)
-
-find_library(SOC_SIMULATOR_LIB
-    NAMES libpromise_profiler.a
-    HINTS ../soc_simulator/lib
-)
-
-
-if(USE_GFLAGS)
-  target_link_libraries(tensor_runtime gflags cudnn -lcurand)
-else()
-  target_link_libraries(tensor_runtime cudnn -lcurand)
-endif()
-
-target_link_libraries(tensor_cpu_runtime)
-
-# Adding rule for the debugging source
-add_executable(test_ops  dnn_sources/src/test_ops.cc)
-target_link_libraries(test_ops  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-#**** CPU sources
-add_executable(fc2_cpu  dnn_sources/src/fc2_cpu.cc)
-target_link_libraries(fc2_cpu  tensor_cpu_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-# Full-Precision versions
-add_executable(lenet_tanh  dnn_sources/src/lenet2_tanh.cc)
-target_link_libraries(lenet_tanh  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras  dnn_sources/src/lenet_keras.cc)
-target_link_libraries(lenet_keras  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_int32  dnn_sources/src/lenet_int32.cc)
-target_link_libraries(lenet_int32  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-add_executable(alexnet_cifar10  dnn_sources/src/alexnet_cifar10_front.cc)
-target_link_libraries(alexnet_cifar10  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet_cifar10_tuner  dnn_sources/src/alexnet_cifar10_tuner.cc)
-target_link_libraries(alexnet_cifar10_tuner  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet_cifar10_approx  dnn_sources/src/alexnet_cifar10_approx.cc)
-target_link_libraries(alexnet_cifar10_approx  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet2_cifar10  dnn_sources/src/alexnet2_cifar10.cc)
-target_link_libraries(alexnet2_cifar10  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet2_cifar10_tuner  dnn_sources/src/alexnet2_cifar10_tuner.cc)
-target_link_libraries(alexnet2_cifar10_tuner  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar10  dnn_sources/src/vgg16_cifar10.cc)
-target_link_libraries(vgg16_cifar10  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar10_tuner  dnn_sources/src/vgg16_cifar10_tuner.cc)
-target_link_libraries(vgg16_cifar10_tuner  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_cifar10  dnn_sources/src/resnet18_cifar10.cc)
-target_link_libraries(resnet18_cifar10  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_cifar10_approx  dnn_sources/src/resnet18_cifar10_approx.cc)
-target_link_libraries(resnet18_cifar10_approx  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_cifar10_inputapprox  dnn_sources/src/resnet18_cifar10_inputapprox.cc)
-target_link_libraries(resnet18_cifar10_inputapprox  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_cifar10_tuner  dnn_sources/src/resnet18_cifar10_tuner.cc)
-target_link_libraries(resnet18_cifar10_tuner  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar100  dnn_sources/src/vgg16_cifar100.cc)
-target_link_libraries(vgg16_cifar100  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar100_tuner  dnn_sources/src/vgg16_cifar100_tuner.cc)
-target_link_libraries(vgg16_cifar100_tuner  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar100_top5  dnn_sources/src/vgg16_cifar100_5.cc)
-target_link_libraries(vgg16_cifar100_top5  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-# REF binaries
-add_executable(mobilenet_cifar10  dnn_sources/src/mobilenet_cifar10.cc)
-target_link_libraries(mobilenet_cifar10  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-add_executable(mobilenet_cifar10_shallow  dnn_sources/src/mobilenet_cifar10_shallow.cc)
-target_link_libraries(mobilenet_cifar10_shallow  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-#### Image Pipeline Tuning sources
-
-add_executable(pipeline_GEMO  dnn_sources/src/pipeline_GEMO.cc)
-target_link_libraries(pipeline_GEMO  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEO  dnn_sources/src/pipeline_GEO.cc)
-target_link_libraries(pipeline_GEO  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEOM  dnn_sources/src/pipeline_GEOM.cc)
-target_link_libraries(pipeline_GEOM  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSM  dnn_sources/src/pipeline_GSM.cc)
-target_link_libraries(pipeline_GSM  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSME  dnn_sources/src/pipeline_GSME.cc)
-target_link_libraries(pipeline_GSME  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-
-#*** Half precision networks
-
-
-#add_executable(fc4_half  dnn_sources/src/half/fc4_half.cc)
-#target_link_libraries(fc4_half  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-#add_executable(lenet_tanh_half  dnn_sources/src/half/lenet_tanh_half.cc)
-#target_link_libraries(lenet_tanh_half  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-#add_executable(lenet_keras_half  dnn_sources/src/half/lenet_keras_half.cc)
-#target_link_libraries(lenet_keras_half  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-#********* Promise API sources
-add_executable(lenet_promise  dnn_sources/src/promise/lenet_promise.cc)
-target_link_libraries(lenet_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-# Quantized PROMISE sources
-add_executable(alexnet_promise  dnn_sources/src/promise/alexnet_promise.cc)
-target_link_libraries(alexnet_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet2_promise  dnn_sources/src/promise/alexnet2_promise.cc)
-target_link_libraries(alexnet2_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_promise  dnn_sources/src/promise/resnet18_promise.cc)
-target_link_libraries(resnet18_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar100_promise  dnn_sources/src/promise/vgg16_cifar100_promise.cc)
-target_link_libraries(vgg16_cifar100_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar10_promise  dnn_sources/src/promise/vgg16_cifar10_promise.cc)
-target_link_libraries(vgg16_cifar10_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-# REF Source - BUILT After Support for SMART QUANTIZATION
-add_executable(mobilenet_promise  dnn_sources/src/promise/mobilenet_promise.cc)
-target_link_libraries(mobilenet_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(mobilenet_shallow_promise  dnn_sources/src/promise/mobilenet_shallow_promise.cc)
-target_link_libraries(mobilenet_shallow_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-#### Image Pipeline PROMISE sources
-add_executable(pipeline_GEMO_promise  dnn_sources/src/promise/pipeline_GEMO_promise.cc)
-target_link_libraries(pipeline_GEMO_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEO_promise  dnn_sources/src/promise/pipeline_GEO_promise.cc)
-target_link_libraries(pipeline_GEO_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEOM_promise  dnn_sources/src/promise/pipeline_GEOM_promise.cc)
-target_link_libraries(pipeline_GEOM_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSM_promise  dnn_sources/src/promise/pipeline_GSM_promise.cc)
-target_link_libraries(pipeline_GSM_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSME_promise  dnn_sources/src/promise/pipeline_GSME_promise.cc)
-target_link_libraries(pipeline_GSME_promise  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-#############  Promise Validation Sources #############
-
-add_executable(alexnet_valid  dnn_sources/src/promise/alexnet_valid.cc)
-target_link_libraries(alexnet_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(alexnet2_valid  dnn_sources/src/promise/alexnet2_valid.cc)
-target_link_libraries(alexnet2_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(resnet18_valid  dnn_sources/src/promise/resnet18_valid.cc)
-target_link_libraries(resnet18_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar100_valid  dnn_sources/src/promise/vgg16_cifar100_valid.cc)
-target_link_libraries(vgg16_cifar100_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(vgg16_cifar10_valid  dnn_sources/src/promise/vgg16_cifar10_valid.cc)
-target_link_libraries(vgg16_cifar10_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-add_executable(mobilenet_valid  dnn_sources/src/promise/mobilenet_valid.cc)
-target_link_libraries(mobilenet_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(mobilenet_shallow_valid  dnn_sources/src/promise/mobilenet_shallow_valid.cc)
-target_link_libraries(mobilenet_shallow_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-##### Image pipeline validation sources
-add_executable(pipeline_GEMO_valid  dnn_sources/src/promise/pipeline_GEMO_valid.cc)
-target_link_libraries(pipeline_GEMO_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEO_valid  dnn_sources/src/promise/pipeline_GEO_valid.cc)
-target_link_libraries(pipeline_GEO_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GEOM_valid  dnn_sources/src/promise/pipeline_GEOM_valid.cc)
-target_link_libraries(pipeline_GEOM_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSM_valid  dnn_sources/src/promise/pipeline_GSM_valid.cc)
-target_link_libraries(pipeline_GSM_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(pipeline_GSME_valid  dnn_sources/src/promise/pipeline_GSME_valid.cc)
-target_link_libraries(pipeline_GSME_valid  tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
diff --git a/llvm/projects/hpvm-tensor-rt/CMakeLists_cpu.txt b/llvm/projects/hpvm-tensor-rt/CMakeLists_cpu.txt
deleted file mode 100644
index cff0129c2aa02b9776ed7bba8e92029d2c2560e8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/CMakeLists_cpu.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-cmake_minimum_required (VERSION 2.6)
-project (approxhpvm-tensorRt-cpu)
-
-
-# Addresses a bug where code is not compiled as C++11 in non-CUDA code and older g++ versions
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 " )
- 
-
-# Adding new rule for building a cuDNN runtime library
-add_library(tensor_cpu_runtime tensor_runtime/src/tensor_cpu_runtime.cc)
-target_link_libraries(tensor_cpu_runtime)
-
-
-#**** CPU sources
-add_executable(fc2_cpu  dnn_sources/src/fc2_cpu.cc)
-target_link_libraries(fc2_cpu  tensor_cpu_runtime)
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/README.md b/llvm/projects/hpvm-tensor-rt/README.md
deleted file mode 100644
index 5b3e5f99d39cf5c697051fa2580eb74f207bb031..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/README.md
+++ /dev/null
@@ -1,68 +0,0 @@
-# ApproxHPVM Tensor Runtime
-
-## Dependencies
-
-* cuDNN 7.0 or above
-* CUDA 9.0 or above
-* cuBLAS 9.0 or above (typically bundled with the CUDA toolkit)
-
-## Dependent Library Builds
-
-```shell
-cd ../gpu_profiler
-mkdir lib && cd lib
-cmake ../
-make
-
-cd ../../soc_simulator
-mkdir lib && cd lib
-cmake ../
-make
-```
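-
-The static libraries are built into each project's `lib/` directory, which is
-where the tensor runtime's CMake build looks for them (`find_library` with
-`HINTS ../gpu_profiler/lib` and `../soc_simulator/lib`).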
-
-
-## Build
-
-```shell
-source bin/setup_cuda_llvm_paths.sh
-mkdir build
-cd build
-cmake ../
-make
-```
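-
-The benchmark binaries land in `build/`; for example (benchmark choice is
-illustrative):
-
-```shell
-cd build
-./lenet_keras
-```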
-
-
-## Directory Structure
-
-* ./tensor_runtime:
-  * ./tensor_runtime/include/: Include files for Tensor Runtime
-  * ./tensor_runtime/include/tensor_signatures.cc: Include file with Tensor RT signatures
-    * NOTE: update this file whenever the Tensor RT API changes
-  * ./tensor_runtime/src/: HPVM Tensor RT sources
-  
-* ./dnn_sources:
-  * ./dnn_sources/src/${BENCH}.cc: Per Bench FULL-precision source
-  * ./dnn_sources/src/half/${BENCH}.cc: Per Bench HALF-precision source
-  * ./dnn_sources/src/promise/${BENCH}.cc: Per Bench PROMISE-API source
-  
-* ./lib:
-  * ./lib/tensor_runtime.ll
-    * NOTE: generated from ./tensor_runtime/include/tensor_signatures.cc
-  * ./lib/libtensor_runtime.a
-    * NOTE: Linked against HPVM benchmarks
-  * ./lib/libtensor_autotuner.a
-    * NOTE: error-injection library linked with benchmarks
-    
-* ./bin:
-  * ./bin/install_runtime.sh: Script for moving Tensor RT files to ./lib
-  * ./bin/run_autotuner.py: Python script for running Autotuner experiments
-  * ./bin/setup_tyler_paths.sh: Tyler-specific path setup for Tensor RT
-  * ./bin/setup_jetson.sh: Jetson board specific path setup for Tensor RT
-  * ./bin/swing_selection.py: Script for hardware mapping
-    * NOTE: Includes the L2,L1 norm mapping to hardware knobs
-
-* ./opentuner:
-  * ./opentuner/autotuner/: Autotuner scripts
-  * ./opentuner/autotuner/approxhpvm_tuner.py: Tuner script for ApproxHPVM binaries
-  * ./opentuner/autotuner/promise_tuner.py: Tuner script for tuning PROMISE voltage levels
-  
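-
-For example, to regenerate the runtime bitcode consumed by the HPVM passes
-(note the script hard-codes machine-specific CUDA and HPVM paths):
-
-```shell
-./bin/install_runtime.sh
-```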
diff --git a/llvm/projects/hpvm-tensor-rt/bin/error_sensitivity.py b/llvm/projects/hpvm-tensor-rt/bin/error_sensitivity.py
deleted file mode 100644
index 9f2ffb3eacd3cb81bcefb4b44a48f1d0a8a8356d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/error_sensitivity.py
+++ /dev/null
@@ -1,139 +0,0 @@
-
-
-import subprocess
-import os
-import operator
-
-
-def constructTunerFile(num_flags, tensor_id, error_level, default_error):
-
-  f = open("opentuner_flags", "w+")
-
-  for i in range(num_flags):
-    if i == tensor_id:
-      f.write(str(error_level) + "\n")
-    else:
-      f.write(str(default_error) + "\n")
-
-  f.close()
-    
-
-
-def runAndTestError(binary_name, gold_acc):
-
-  num_runs = 20
-
-  binary_name = "./" + binary_name
-  FNULL = open(os.devnull, 'wb')
-  p = subprocess.Popen([binary_name, str(num_runs)], stdout = FNULL)
-  p.wait()
-
-  f = open("run_accuracies.txt")
-
-  total_err = 0.0
-  for x in f:
-    acc = float(x.strip())    
-    total_err += (gold_acc - acc)
-
-  avg_err = total_err / num_runs
-
-  return avg_err
-    
-
-
-
-def test_sensitivity(Bench):
-
-  tensor_errors = []
-  
-  error_levels = [6, 9, 12, 15]
-  num_flags = Bench.num_flags
-
-  for tensor_id in range(num_flags):
-    total_error = 0
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 0)
-      error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-      print (tensor_id, error_level, error)
-      total_error += error
-
-    avg_error = total_error / len(error_levels)
-
-    tensor_errors.append([tensor_id, avg_error])
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/tensor_errors_1000.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i) +  "\t" + str(tensor_errors[i][1]) + "\n")
-
-  f.close()
-
-  f_name = Bench.base_dir + "/tensor_errors_ranked_1000.txt"  
-  f2 = open(f_name, "w+")
-  tensor_errors.sort(key=operator.itemgetter(1))
-
-
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-
-    f2.write(str(tensor_errors[i][0]) +  "\t" + str(tensor_errors[i][1]) + "\n")
-    
-
-  f2.close()
-
-
-
-def test_sensitivity2(Bench):
-
-  num_flags = Bench.num_flags
-
-  constructTunerFile(num_flags, 0, 3, 3)
-  error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-
-  ref_acc = Bench.tuner_accuracy - error
-  print ("*** Gold accuracy = ", Bench.tuner_accuracy, "  Ref accuracy = ", ref_acc, " *** \n\n")
-  
-  
-  tensor_errors = []
-  
-  error_levels = [6, 9, 12, 15]
-
-  for tensor_id in range(num_flags):
-    total_error = 0
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 3)
-      error = runAndTestError(Bench.tuner_binary, ref_acc)
-      print (tensor_id, error_level, error)
-      total_error += error
-
-    avg_error = total_error / len(error_levels)
-
-    tensor_errors.append([tensor_id, avg_error])
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/tensor_composite_errors.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i) +  "\t" + str(tensor_errors[i][1]) + "\n")
-
-  f.close()
-
-  f_name = Bench.base_dir + "/tensor_composite_errors_ranked.txt"  
-  f2 = open(f_name, "w+")
-  tensor_errors.sort(key=operator.itemgetter(1))
-
-
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-
-    f2.write(str(tensor_errors[i][0]) +  "\t" + str(tensor_errors[i][1]) + "\n")
-    
-
-  f2.close()
diff --git a/llvm/projects/hpvm-tensor-rt/bin/exhaustive.py b/llvm/projects/hpvm-tensor-rt/bin/exhaustive.py
deleted file mode 100644
index bae38bf7e497897ae3db4e12dce48914903739fb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/exhaustive.py
+++ /dev/null
@@ -1,140 +0,0 @@
-
-import os
-import sys
-import shutil
-import subprocess
-import shutil
-
-
-
-class Benchmark:
-  def __init__(self):
-    self.binary = ""
-    self.num_flags = 4
-
-    
-
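-# NOTE: the AlexnetN variable names below are historical; each entry actually
-# describes the benchmark named by its binary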
-Alexnet1 = Benchmark()
-Alexnet1.binary = "./lenet_keras_promise"
-Alexnet1.accuracy = 98.8
-Alexnet1.flags = [[8], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4] ] 
-
-
-Alexnet2 = Benchmark()
-Alexnet2.binary = "./fc4_clipped_promise"
-Alexnet2.accuracy = 93.72 
-Alexnet2.flags = [[3, 4, 5, 6, 7], [2, 3, 4, 5, 6, 7], [2, 3, 4, 5, 6, 7], [2, 3, 4, 5, 6, 7] ] 
-
-
-
-def dumpConfig(conf_flags, dir_prefix, file_id):
-  
-  shutil.copy("promise_flags", dir_prefix + "/" + str(file_id) + ".txt")
-
-  
-def dumpFinalConfigs(final_confs, dir_prefix):
-
-  f = open(dir_prefix + "/final_confs.txt", "w+")
-  for conf in final_confs:
-    ind = 0
-    for flag in conf:
-      f.write(str(flag))
-      if ind < len(conf) - 1:
-        f.write(",")
-      
-      ind += 1
-    f.write("\n")   
-
-  f.close()  
-
-
-def getAccuracy():
-
-  file = open("final_accuracy", "r")
-  acc_str = file.read()
-  file.close()
-
-  # Guard the conversion: a malformed file should not crash the sweep
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    return 20
-
-  print (accuracy)
-  return accuracy
-
-    
-
-
-def testConfidence(binary, target_acc, total_runs):
-
-  for i in range(total_runs):  
-    p = subprocess.Popen("./" + binary, shell=False)
-    p.wait()  
-    acc = getAccuracy()
-    if acc < target_acc:
-      return False
-
-  return True
-
-    
-def singleRun(binary):
-
-  p = subprocess.Popen("./" + binary, shell=False)
-  p.wait()  
-
-  return getAccuracy()
-
-  
-
-def createPromiseFile(conf_flags):
-
-    f = open("promise_flags", "w+")
-    for flag in conf_flags:
-        f.write(str(flag) + "\n")
-    f.close()
-    
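-# e.g. createPromiseFile([8, 1, 2, 3]) leaves a promise_flags file containing
-# one knob value per line: "8", "1", "2", "3"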
-
-
-def runExhaustive(Bench, threshold, dir_prefix):
-
-  flags = Bench.flags
-  
-  accepted_confs = []
-  ind = 0
-  for flag1 in flags[0]:
-    for flag2 in flags[1]:
-      for flag3 in flags[2]:
-        for flag4 in flags[3]:
-          print (flag1, flag2, flag3, flag4)
-          conf_flags = []
-          conf_flags.append(flag1)
-          conf_flags.append(flag2)
-          conf_flags.append(flag3)
-          conf_flags.append(flag4)        
-    
-          createPromiseFile(conf_flags)
-
-          accuracy = singleRun(Bench.binary)
-          target_acc = Bench.accuracy - threshold
-          
-          if accuracy > target_acc:
-            if testConfidence(Bench.binary, target_acc, 3):
-              dumpConfig(conf_flags, dir_prefix, ind)
-              accepted_confs.append(conf_flags)
-
-          ind += 1   
-              
-  dumpFinalConfigs(accepted_confs, dir_prefix)
-  
-              
-
-if __name__ == "__main__":
-
-    #runExhaustive(Alexnet1, 1.0, "lenet_1")
-    #runExhaustive(Alexnet1, 2.0, "lenet_2")
-      
-    runExhaustive(Alexnet2, 1.0, "fc4_1")
-    runExhaustive(Alexnet2, 2.0, "fc4_2")
-      
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/extractQuantRange.py b/llvm/projects/hpvm-tensor-rt/bin/extractQuantRange.py
deleted file mode 100644
index 0b7f09d92e91894d284b40cc0bd2d346c08e36c7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/extractQuantRange.py
+++ /dev/null
@@ -1,42 +0,0 @@
-
-
-import sys
-
-
-if __name__ == "__main__":
-
-    f = open(sys.argv[1], "r")
-    f2 = open("quant_ranges.txt", "w+")
-
-    layer_line = False
-    for x in f:
-        if "ConvLayer_PROMISE" in x or "FCLayer_PROMISE" in x or layer_line == True:
-            if layer_line == True:
-              layer_line = False
-            else:
-              layer_line = True
-            
-            print x 
-            toks = x.split(",")
-
-            for tok in toks:
-                tok = tok.strip()
-                try:
-                    tok_val = float(tok)
-                    # Tokens that also parse as ints (e.g. dimensions) are
-                    # skipped; only pure floats -- the range values -- are kept
-                    try:
-                        int(tok)
-                    except ValueError:
-                        print (tok_val)
-                        f2.write(str(tok_val) + " ")
-                except ValueError:
-                    continue
-
-            f2.write("\n")
-    
-
-    f.close()
-    f2.close()
-
-        
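-# Example invocation (source file name is illustrative):
-#   python extractQuantRange.py alexnet_promise_src.cc
-# scans the source for ConvLayer_PROMISE/FCLayer_PROMISE calls and writes one
-# line of floating-point range values per layer to quant_ranges.txt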
diff --git a/llvm/projects/hpvm-tensor-rt/bin/install_runtime.sh b/llvm/projects/hpvm-tensor-rt/bin/install_runtime.sh
deleted file mode 100644
index 33a54cd0de626113e5cf11e2f6a6928d4fa384eb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/install_runtime.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-export HPVM_TENSOR_RT_HOME=/home/hsharif3/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/
-export PATH=/home/hsharif3/Gitlab/hpvm/build/bin/:$PATH
-
-clang++ -I/software/cuda-9.1/include -emit-llvm -c ${HPVM_TENSOR_RT_HOME}/tensor_runtime/include/tensor_signatures.cc -o ${HPVM_TENSOR_RT_HOME}/lib/tensor_runtime.bc
-llvm-dis --version
-llvm-dis ${HPVM_TENSOR_RT_HOME}/lib/tensor_runtime.bc
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/mark_depthwise.py b/llvm/projects/hpvm-tensor-rt/bin/mark_depthwise.py
deleted file mode 100644
index c64a9f242fcf80b585c5862ceef16b8fb8ce50a5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/mark_depthwise.py
+++ /dev/null
@@ -1,48 +0,0 @@
-
-import sys
-
-
-def loadLayerDesc(layer_desc_file):
-
-    layer_desc = []
-    f = open(layer_desc_file)
-    for x in f:
-      vals = x.split()
-      layer_desc.append(vals)
-
-    return layer_desc      
-
-
-
-if __name__ == "__main__":
-
-  if len(sys.argv) < 4:
-      print ("Usage: python mark_depthwise.py  $layer_file  $input_conf  $output_conf")
-      sys.exit(1)
-
-  layer_file_name = sys.argv[1]
-  input_file_name = sys.argv[2]
-  output_file_name = sys.argv[3]
-
-  
-  layer_desc = loadLayerDesc(layer_file_name)
-
-  f_in = open(input_file_name)
-  f_out = open(output_file_name, "w+")
-    
-  for x in f_in:
-      it = 0
-      confs = x.split(",")
-      print (confs)
-      for conf in confs:
-          print (" it = ", it, " layer_desc[it] = ", layer_desc[it], " \n")
-          if layer_desc[it][0] == "depthwise_conv":
-              f_out.write("9,")
-          else:
-              f_out.write(conf)
-              if it < len(confs) - 1:
-                  f_out.write(",")
-
-          it += 1        
-
-  f_in.close()
-  f_out.close()
diff --git a/llvm/projects/hpvm-tensor-rt/bin/measure_conf_accuracy.py b/llvm/projects/hpvm-tensor-rt/bin/measure_conf_accuracy.py
deleted file mode 100644
index 4ca1f3f52e59498725414f37e56e06e5e74f1953..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/measure_conf_accuracy.py
+++ /dev/null
@@ -1,316 +0,0 @@
-
-import os
-import sys
-import shutil
-import subprocess
-import shutil
-import numpy as np
-
-
-
-class Benchmark:
-  def __init__(self):
-    self.binary = ""
-
-
-
-benchmarks = {} 
-
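-# NOTE: the AlexnetN variable names below are historical; each entry actually
-# describes the benchmark named by its binary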
-Alexnet1 = Benchmark()
-Alexnet1.binary = "./lenet_keras_promise"
-Alexnet1.accuracy = 98.7
-Alexnet1.loss1_conf = "8 8 8 8,4,4,7"
-Alexnet1.loss2_conf = "8 8 8 8,3,4,7"
-
-benchmarks["lenet"] = Alexnet1
-
-
-Alexnet2 = Benchmark()
-Alexnet2.binary = "./fc4_clipped_promise"
-Alexnet2.accuracy = 93.72 
-Alexnet2.loss1_conf = "7,7,6,7"
-Alexnet2.loss2_conf = "4,4,4,5"
-
-benchmarks["fc4"] = Alexnet2
-
-
-Alexnet3 = Benchmark()
-Alexnet3.binary = "./alexnet_valid"
-Alexnet3.accuracy = 79.16 
-Alexnet3.loss1_conf = "8 8 8 8,6,6,6,7,7"
-Alexnet3.loss2_conf = "8 8 8 8,4,4,6,4,7"
-
-benchmarks["alexnet"] = Alexnet3
-
-
-Alexnet4 = Benchmark()
-Alexnet4.binary = "./alexnet2_valid"
-Alexnet4.accuracy = 85.09
-Alexnet4.loss1_conf = "9 9 9,7,7,7,9 9 9,7,9 9"
-Alexnet4.loss2_conf = "9 9 9,7,7,6,8 8 8,6,9 9"
-
-benchmarks["alexnet2"] = Alexnet4
-
-
-Alexnet5 = Benchmark()
-Alexnet5.binary = "./resnet18_valid"
-Alexnet5.accuracy = 89.44 
-Alexnet5.loss1_conf = "9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8"
-Alexnet5.loss2_conf = "9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,7,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8,8 8"
-
-benchmarks["resnet"] = Alexnet5
-
-
-
-Alexnet6 = Benchmark()
-Alexnet6.binary = "./vgg16_cifar10_valid"
-Alexnet6.accuracy = 89.41
-Alexnet6.loss1_conf = "9 9 9,7,7,7,9 9 9,8 8 8,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9"
-Alexnet6.loss2_conf = "9 9 9,5,5,8 8 8 8,4,6,4,7,8 8 8,4,4,4,7,8 8 8,8 8"
-
-benchmarks["vgg16_cifar10"] = Alexnet6
-
-
-Alexnet7 = Benchmark()
-Alexnet7.binary = "./vgg16_cifar100_valid"
-Alexnet7.accuracy = 66.19
-Alexnet7.loss1_conf = "9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,7,7,8 8 8,8 8 8 8,7,7,8 8 8 8,8 8 8,8 8"
-Alexnet7.loss2_conf = "9 9 9,8 8 8 8,8 8 8,7,8 8 8,8 8 8,8 8 8 8,6,6,7,8 8 8,7,6,8 8 8,8 8"
-
-benchmarks["vgg16_cifar100"] = Alexnet7
-
-
-
-Alexnet8 = Benchmark()
-Alexnet8.binary = "./pipeline_GEOM_valid"
-Alexnet8.loss1_conf = "8 8,8 8 8,8 8,7"
-Alexnet8.loss2_conf = "8 8,8 8 8,8 8,6"
-
-benchmarks["pipeline_GEOM"] = Alexnet8
-
-
-
-Alexnet9 = Benchmark()
-Alexnet9.binary = "./pipeline_GEMO_valid"
-Alexnet9.loss1_conf = "8 8,8 8 8,8 8,8 8"
-Alexnet9.loss2_conf = "7,8 8 8,8 8,8 8"
-
-benchmarks["pipeline_GEMO"] = Alexnet9
-
-
-
-Alexnet10 = Benchmark()
-Alexnet10.binary = "./pipeline_GEO_valid"
-Alexnet10.loss1_conf = "8 8,8 8 8,8 8"
-Alexnet10.loss2_conf = "8 8,8 8 8,8 8"
-
-benchmarks["pipeline_GEO"] = Alexnet10
-
-
-
-Alexnet11 = Benchmark()
-Alexnet11.binary = "./pipeline_GSM_valid"
-Alexnet11.loss1_conf = "8 8,8 8,7"
-Alexnet11.loss2_conf = "7,8 8,6"
-
-benchmarks["pipeline_GSM"] = Alexnet11
-
-
-
-Alexnet12 = Benchmark()
-Alexnet12.binary = "./pipeline_GSME_valid"
-Alexnet12.loss1_conf = "8 8,8 8,8 8,8 8 8"
-Alexnet12.loss2_conf = "7,8 8,8 8,8 8 8"
-
-benchmarks["pipeline_GSME"] = Alexnet12
-
-
-
-def createPromiseFile(conf_flag_str):
-
-    conf_flags = conf_flag_str.split(",")   
-    f = open("promise_flags", "w+")
-    for flag_str in conf_flags:
-        flags = flag_str.split()
-        f.write(str(flags[0]) + "\n")
-    f.close()
-
-    
-def getRunAccuracies():
-
-  run_accuracies = []  
-  file = open("run_accuracies.txt", "r")
-  file_str = file.read()
-
-  for flag in file_str.split("\n"):
-    print ("*** flag = ", flag)
-    flag = flag.strip()
-    if flag == "":
-        continue  
-    run_accuracies.append(float(flag))        
-
-  file.close()
-
-  return run_accuracies
-    
-
-
-def testConfidence(binary):
-
-  p = subprocess.Popen("./" + binary, shell=False)
-  p.wait()  
-  run_accuracies = getRunAccuracies()
-        
-  return np.mean(run_accuracies), np.std(run_accuracies)
-
-
-
-def getAccuracy():
-
-  file = open("final_accuracy", "r")
-  acc_str = file.read()
-  file.close()
-
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    return 20
-
-  return accuracy
-
-
-def getPSNR():
-
-  file = open("avg_psnr", "r")
-  psnr_str = file.read()
-  file.close()
-
-  try:
-    psnr = float(psnr_str)
-  except ValueError:
-    return -100
-
-  return psnr
-
-
-
-
-def testPSNRConfidence(binary, total_runs):
-
-  run_accuracies = []
-  run_psnr = []
-  for i in range(total_runs):  
-    p = subprocess.Popen("./" + binary, shell=False)
-    p.wait()  
-    acc = getAccuracy()
-    psnr = getPSNR()
-    run_accuracies.append(acc)
-    run_psnr.append(psnr)
-
-  return np.mean(run_accuracies), np.std(run_accuracies), np.mean(run_psnr), np.std(run_psnr)
-
-    
-
-def runBench(bench_name, dir_prefix):
-
-  Bench = benchmarks[bench_name]
-  binary = Bench.binary
-  accuracy = Bench.accuracy
-
-  createPromiseFile(Bench.loss1_conf)
-  mean, std = testConfidence(binary)
-  print ("mean = ", mean, " std = ", std)
-
-  
-  f = open(dir_prefix + "/" + binary + "_loss1.txt" ,"w+")
-  f.write("mean = " + str(mean) + " std = " + str(std))
-  f.close()
-  
-  createPromiseFile(Bench.loss2_conf)
-  mean, std = testConfidence(binary)
-  print ("mean = ", mean, " std = ", std)
-
-  
-  f = open(dir_prefix + "/" + binary + "_loss2.txt" ,"w+")
-  f.write("mean = " + str(mean) + " std = " + str(std))
-  f.close()
-  
-
-
-  
-
-def gen30dbFile():
-
-  f = open("psnr.txt", "w+");
-  f.write("30");
-  f.close()
-  
-
-def gen20dbFile():
-
-  f = open("psnr.txt", "w+");
-  f.write("20");
-  f.close()
-
-
-
-def runPSNRBench(bench_name, dir_prefix):
-
-  Bench = benchmarks[bench_name]
-  binary = Bench.binary
-
-  gen30dbFile()
-  createPromiseFile(Bench.loss1_conf)
-  mean, std, psnr_mean, psnr_std = testPSNRConfidence(binary, 20)
-  print ("mean = ", mean, " std = ", std)
-
-  
-  f = open(dir_prefix + "/" + binary + "_loss30.txt" ,"w+")
-  f.write("mean = " + str(mean) + " std = " + str(std))
-  f.write("  psnr_mean = " + str(psnr_mean) + " psnr_std = " + str(psnr_std)) 
-  f.close()
-
-  
-  gen20dbFile()
-  createPromiseFile(Bench.loss2_conf)
-  mean, std, psnr_mean, psnr_std = testPSNRConfidence(binary, 20)
-  print ("mean = ", mean, " std = ", std)
-  
-  f = open(dir_prefix + "/" + binary + "_loss20.txt" ,"w+")
-  f.write("mean = " + str(mean) + " std = " + str(std))
-  f.write("  psnr_mean = " + str(psnr_mean) + " psnr_std = " + str(psnr_std)) 
-  f.close()
-  
-
-
-
-  
-
-def runDNNs():
-
-  #runBench("fc4", "avg_accuracies")
-  #runBench("lenet", "avg_accuracies")
-  #runBench("alexnet", "avg_accuracies")
-  #runBench("alexnet2", "avg_accuracies")
-  #runBench("resnet", "avg_accuracies")
-  #runBench("vgg16_cifar10", "avg_accuracies")
-  #runBench("vgg16_cifar100", "avg_accuracies")
-
-  runPSNRBench("pipeline_GEOM", "avg_accuracies")
-  runPSNRBench("pipeline_GEMO", "avg_accuracies")
-  runPSNRBench("pipeline_GEO", "avg_accuracies")
-  runPSNRBench("pipeline_GSM", "avg_accuracies")
-  runPSNRBench("pipeline_GSME", "avg_accuracies")
-
-  
-      
-
-if __name__ == "__main__":
-
-  runDNNs()
-  
-      
diff --git a/llvm/projects/hpvm-tensor-rt/bin/measure_confidence.py b/llvm/projects/hpvm-tensor-rt/bin/measure_confidence.py
deleted file mode 100644
index 74aa23c71aa3e81fc9422a3cc73ba3b69ed98c8a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/measure_confidence.py
+++ /dev/null
@@ -1,125 +0,0 @@
-
-import argparse
-import os
-import subprocess
-import sys
-
-
-def getAccuracy(file_name):
-
-  if not os.path.exists(file_name):
-    print("final_accuracy file not found ")
-    sys.exit(0)
-    
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-  accuracy = float(acc_str)
-  print (accuracy)
-  return accuracy
-
-
-total_runs = 12.0
-skip_lines = 0
-
-
-def test_func():
-  print "test_func"
-  sys.exit(0)
-
-
-def do_multiple_runs(binary_name, accuracy_threshold, confidence_threshold):
-
-  #total_runs = 100.0
-  successful_runs = 0.0
-  total_acc = 0
-
-  for i in range(int(total_runs)):
-    subprocess.call(binary_name)
-    accuracy = getAccuracy("final_accuracy")
-    total_acc += accuracy
-
-    if accuracy > accuracy_threshold:
-      successful_runs += 1
-
-  confidence = (successful_runs / total_runs) * 100.0    
-  print("confidence = ", confidence)    
-  avg_acc = total_acc / total_runs
-  print("average accuracy = ", avg_acc)
-
-  return confidence, avg_acc
-  
-
-def compute_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print (file_names)
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("opentuner_flags", "w+")
-
-    index = 0
-    results_str = ""
-    for x in f:
-      if index >= skip_lines:
-        error_knob = int(float(x.split()[1]))
-        print (error_knob)
-        tuner_file.write(str(error_knob) + "\n")
-
-      results_str += x
-      index += 1
-      
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-
-    if run_confidence > 90:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(results_str)
-      f2.close()
-
-    conf_result = (run_confidence, avg_accuracy, file_name)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-    
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-  argparser.add_argument('--output-dir', help='Directory for storing output directory')
-  argparser.add_argument('--binary', help='Binary name to run')
-  argparser.add_argument('--accuracy', type=float,  help='Accuracy constraint')
-  argparser.add_argument('--confidence', type=float, help='Confidence threshold')
-  
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-  output_dir = args.output_dir
-  binary = args.binary
-  accuracy = args.accuracy
-  confidence = args.confidence
-
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  #print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  
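-
-# Example invocation (paths and thresholds are illustrative):
-#   python measure_confidence.py --result-dir tuner_results/lenet \
-#     --output-dir tuner_results/lenet/high_confidence \
-#     --binary ./lenet_keras_promise --accuracy 98.0 --confidence 90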
diff --git a/llvm/projects/hpvm-tensor-rt/bin/mergeTensorOpAndErrors.py b/llvm/projects/hpvm-tensor-rt/bin/mergeTensorOpAndErrors.py
deleted file mode 100644
index 3c9ea9de2854ed133350950d3995f459120176de..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/mergeTensorOpAndErrors.py
+++ /dev/null
@@ -1,60 +0,0 @@
-
-
-
-if __name__ == "__main__":
-
-  dnn_benchs = []
-  dnn_benchs.append("fc4")
-  dnn_benchs.append("lenet_keras")
-  dnn_benchs.append("alexnet_cifar10")
-  dnn_benchs.append("alexnet2_cifar10")
-  dnn_benchs.append("vgg16_cifar10")
-  dnn_benchs.append("vgg16_cifar100")
-  dnn_benchs.append("resnet18_cifar10")
-  dnn_benchs.append("mobilenet")
-  dnn_benchs.append("mobilenet_shallow")
-
-  
-  for bench in dnn_benchs:
-    errors_file1 = "build_tuner/tuner_results/" + bench + "/tensor_errors_1000.txt"    
-    errors_file2 = "build_test/tuner_results/" + bench + "/tensor_composite_errors.txt"    
-    ops_file = "build_tuner/tuner_results/" + bench + "/op_names.txt"    
-
-    f1 = open(errors_file1)
-    f2 = open(errors_file2)
-    f3 = open(ops_file)
-
-    fout = open("build_tuner/tuner_results/" + bench + "/tensor_op_errors.txt", "w+")
-
-    bench_data = []
-    for x in f3:
-      op_name = x.strip()  
-      bench_data.append([op_name, 0.0, 0.0])    
-
-    it = 0
-    for x in f1:
-      if it >= len(bench_data):
-        break
-      toks = x.split()
-      error1 = float(toks[1])
-      print (error1)
-      bench_data[it][1] = error1
-      it += 1
-
-    it = 0
-    for x in f2:
-      if it >= len(bench_data):
-        break
-      toks = x.split()
-      error2 = float(toks[1])
-      bench_data[it][2] = error2
-      it += 1
-
-    for i in range(len(bench_data)):
-      fout.write(str(i) + "\t" + bench_data[i][0] + "\t" + str(bench_data[i][1]) + "\t" + str(bench_data[i][2]) + "\n")
-
-    fout.close()
-    f1.close()
-    f2.close()
-    f3.close()
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/read_weight_ranges.py b/llvm/projects/hpvm-tensor-rt/bin/read_weight_ranges.py
deleted file mode 100644
index c54d7dfcddc161aa20dd8378d2652d32c4905e38..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/read_weight_ranges.py
+++ /dev/null
@@ -1,43 +0,0 @@
-
-
-import numpy as np
-import os
-import struct
-
-
-def read_value_range(file_name):
-
-  print (file_name)
-  f = open(file_name, "rb")
-
-  # Weights are stored as raw 32-bit floats
-  num_bytes = os.stat(file_name).st_size
-  elems = num_bytes // 4
-
-  data_arr = struct.unpack('f' * elems, f.read(4 * elems))
-  f.close()
-
-  print (np.amin(data_arr))
-  print (np.amax(data_arr))
-
-
-  
-
-if __name__ == "__main__":
-
-  dir_prefix = "model_params/alexnet2_cifar10/"
-  print (dir_prefix)
-  read_value_range(dir_prefix + "norm_cifar_input.bin")
-  read_value_range(dir_prefix + "conv1.bin")
-  read_value_range(dir_prefix + "conv1_bias.bin")
-  read_value_range(dir_prefix + "conv2.bin")
-  read_value_range(dir_prefix + "conv2_bias.bin")
-  read_value_range(dir_prefix + "conv3.bin")
-  read_value_range(dir_prefix + "conv3_bias.bin")
-  read_value_range(dir_prefix + "conv4.bin")
-  read_value_range(dir_prefix + "conv4_bias.bin")
-  read_value_range(dir_prefix + "conv5.bin")
-  read_value_range(dir_prefix + "conv5_bias.bin")
-  read_value_range(dir_prefix + "conv6.bin")
-  read_value_range(dir_prefix + "conv6_bias.bin")
-  read_value_range(dir_prefix + "fc1.bin")
-  read_value_range(dir_prefix + "fc1_bias.bin")
-      
diff --git a/llvm/projects/hpvm-tensor-rt/bin/replace_half_calls.py b/llvm/projects/hpvm-tensor-rt/bin/replace_half_calls.py
deleted file mode 100644
index b75a7d4750074cf6234151ae21a8bff5af1050d5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/replace_half_calls.py
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-import sys
-
-
-if __name__ == "__main__":
-
-  if len(sys.argv) < 3:
-    print ("Usage: python replace_half_calls.py  in_file.cc  half_out_file.cc \n")
-    sys.exit(0)
-          
-  file_name = sys.argv[1]
-  out_file_name = sys.argv[2]
-
-  f = open(file_name)
-  src = f.read()
-  f.close()
-
-  # Replace the longer name first so "tensorRelu" does not clobber
-  # "tensorRelu2" matches
-  src = src.replace("tensorConvolution", "tensorHalfConvolution")
-  src = src.replace("tensorAdd", "tensorHalfAdd")
-  src = src.replace("tensorRelu2", "tensorHalfRelu2")
-  src = src.replace("tensorRelu", "tensorHalfRelu")
-  src = src.replace("tensorTanh", "tensorHalfTanh")
-  src = src.replace("tensorPooling", "tensorHalfPooling")
-  src = src.replace("tensorGemmGPU", "tensorHalfGemmGPU")
-
-  print (src)
-
-  f2 = open(out_file_name, "w+")
-  f2.write(src)
-  f2.close()
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/select_top_results.py b/llvm/projects/hpvm-tensor-rt/bin/select_top_results.py
deleted file mode 100644
index 898b4c4f42211e010b1544039cbd4b4125c03b92..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/select_top_results.py
+++ /dev/null
@@ -1,89 +0,0 @@
-
-
-import argparse
-import sys
-import os
-
-
-log_index = 7
-linear_index = 8
-quad_index = 9
-
-top_k = 10
-
-def dump_results(sorted_list, k, result_dir, sub_dir):
-
-  ref_dir = result_dir + "/" + sub_dir
-  if not os.path.exists(ref_dir):
-    os.mkdir(ref_dir)
-
-  # Guard against result dirs holding fewer than k configurations
-  for i in range(min(k, len(sorted_list))):
-    file_name = sorted_list[i][1]
-    file_name = ref_dir + "/" + file_name + "_rank_" + str(i)
-    f = open(file_name, "w+")
-    f.write(str(sorted_list[i][2]) + "\t")
-    f.write(str(sorted_list[i][3]) + "\t")
-    f.write(str(sorted_list[i][4]) + "\n")
-    f.write(sorted_list[i][0])
-    f.close()
-
-    
-    
-
-def select_top_results(result_dir):
-
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print (file_names)
-
-  results_arr = []
-  
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-
-    log_result = 0.0
-    linear_result = 0.0
-    quad_result = 0.0
-    file_str = ""
-    
-    f = open(result_dir + "/" + file_name)
-    for x in f:
-      words = x.split()
-      log_result += float(words[log_index])
-      linear_result += float(words[linear_index])
-      quad_result += float(words[quad_index])
-      file_str += x
-    f.close()
-      
-
-    file_result = (file_str, file_name, log_result, linear_result, quad_result)          
-    results_arr.append(file_result)    
-
-    
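-  # Tuples are (file_str, file_name, log_result, linear_result, quad_result);
-  # an ascending sort on an error column puts the lowest-error configurations
-  # first, so the top_k dumped below are the best under each error metric.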
-  sorted_list = sorted(results_arr, key = lambda tup: tup[2])
-  dump_results(sorted_list, top_k, result_dir, "log")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[3])
-  dump_results(sorted_list, top_k, result_dir, "linear")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[4])
-  dump_results(sorted_list, top_k, result_dir, "quad")
-
-
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-
-  select_top_results(result_dir)
-  
-
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setupEnv.sh b/llvm/projects/hpvm-tensor-rt/bin/setupEnv.sh
deleted file mode 100644
index 58f16f20d0af12f041840b8037ae13e49c214ed4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setupEnv.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-module load cuda-toolkit/8.0
-export CUDNN_PATH=/software/cuda-toolkit-8.0/lib64/
-export LIBRARY_PATH=$LIBRARY_PATH:/software/cuda-toolkit-8.0/lib64/
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setup_aws_paths.sh b/llvm/projects/hpvm-tensor-rt/bin/setup_aws_paths.sh
deleted file mode 100644
index d9f092a19f12a91bd588a356fc99744c14deb26a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setup_aws_paths.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-# CUDNN Path setup
-# module load cuda-toolkit/9.1
-export CUDA_INCLUDE_PATH=/usr/local/cuda/include
-export CUDNN_PATH=/usr/local/cuda/lib64/
-export LIBRARY_PATH=/usr/local/cuda/lib64/:$LIBRARY_PATH
-#export LD_LIBRARY_PATH=/usr/local/cuda/lib64/:$LD_LIBRARY_PATH
-
-# HPVM Path setup
-#export CPATH=$CPATH:/home/hsharif3/anaconda2/include/
-#export PATH=/home/hsharif3/Gitlab/hpvm/build/bin/:$PATH
-#export LLVM_BUILD_ROOT=/home/hsharif3/Gitlab/hpvm/build/
-#export LLVM_SRC_ROOT=/home/hsharif3/Gitlab/hpvm/llvm/
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setup_cuda_llvm_paths.sh b/llvm/projects/hpvm-tensor-rt/bin/setup_cuda_llvm_paths.sh
deleted file mode 100644
index 3548f182f198724600aee855b66169a1bdf12a3a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setup_cuda_llvm_paths.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-# CUDNN Path setup
-module load cuda-toolkit/9.1
-export CUDA_INCLUDE_PATH=/software/cuda-9.1/include
-export CUDNN_PATH=/software/cuda-9.1/lib64/
-export LIBRARY_PATH=/software/cuda-9.1/lib64/:$LIBRARY_PATH
-export LD_LIBRARY_PATH=/software/cuda-9.1/lib64/:$LD_LIBRARY_PATH
-
-# HPVM Path setup
-export CPATH=$CPATH:/home/hsharif3/anaconda2/include/
-export PATH=/home/hsharif3/Gitlab/hpvm/build/bin/:$PATH
-export LLVM_BUILD_ROOT=/home/hsharif3/Gitlab/hpvm/build/
-export LLVM_SRC_ROOT=/home/hsharif3/Gitlab/hpvm/llvm/
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setup_jetson.sh b/llvm/projects/hpvm-tensor-rt/bin/setup_jetson.sh
deleted file mode 100644
index b288ccfe43c577f9ad14c4eb16284539ae5682ea..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setup_jetson.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-
-export LIBRARY_PATH=$LIBRARY_PATH:/usr/local/cuda-9.0/targets/aarch64-linux/lib/
-export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-9.0/targets/aarch64-linux/lib/
-export CUDNN_PATH=/usr/local/cuda-9.0/
-export CUDA_INCLUDE_PATH=${CUDNN_PATH}/include
-
-export LLVM_BUILD_ROOT=/home/nvidia/Gitlab/hpvm/build/
-export LLVM_SRC_ROOT=/home/nvidia/Gitlab/hpvm/llvm/
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setup_paths.sh b/llvm/projects/hpvm-tensor-rt/bin/setup_paths.sh
deleted file mode 100644
index 446481b79a47827bf47341ce9d14f15f57d26866..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setup_paths.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-# Setting include path for Anaconda include files
-export CPATH=$CPATH:/home/hsharif3/anaconda2/include/
-# Setting path for llvm/clang-4.0 build
-export PATH=/home/hsharif3/Gitlab/llvm/llvm/build/bin/:$PATH
-
-export LLVM_BUILD_ROOT=/home/hsharif3/Gitlab/hpvm/build/
-
-export LLVM_SRC_ROOT=/home/hsharif3/Gitlab/hpvm/llvm/
diff --git a/llvm/projects/hpvm-tensor-rt/bin/setup_tyler_paths.sh b/llvm/projects/hpvm-tensor-rt/bin/setup_tyler_paths.sh
deleted file mode 100644
index 3548f182f198724600aee855b66169a1bdf12a3a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/setup_tyler_paths.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-# CUDNN Path setup
-module load cuda-toolkit/9.1
-export CUDA_INCLUDE_PATH=/software/cuda-9.1/include
-export CUDNN_PATH=/software/cuda-9.1/lib64/
-export LIBRARY_PATH=/software/cuda-9.1/lib64/:$LIBRARY_PATH
-export LD_LIBRARY_PATH=/software/cuda-9.1/lib64/:$LD_LIBRARY_PATH
-
-# HPVM Path setup
-export CPATH=$CPATH:/home/hsharif3/anaconda2/include/
-export PATH=/home/hsharif3/Gitlab/hpvm/build/bin/:$PATH
-export LLVM_BUILD_ROOT=/home/hsharif3/Gitlab/hpvm/build/
-export LLVM_SRC_ROOT=/home/hsharif3/Gitlab/hpvm/llvm/
diff --git a/llvm/projects/hpvm-tensor-rt/bin/swing_selection.py b/llvm/projects/hpvm-tensor-rt/bin/swing_selection.py
deleted file mode 100644
index b5c484a23029f97218500571ebb8bcafc718d430..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/swing_selection.py
+++ /dev/null
@@ -1,304 +0,0 @@
-
-
-import os
-
-
-
-def readDataFromText(textFile):
-    results = []
-    with open(textFile, "r") as f:
-        for line in f:
-            token = line.split("\t")
-            if (len(token) < 7):
-                continue
-            record = (token[0], float(token[1]), float(token[5]), float(token[6]))
-            results.append(record)
-    return results
-
-
-convL1bins =  [(0.985901, 1.36474), (0.852871, 1.16982), (0.422283, 0.55701), (0.259752, 0.335259), (0.216577, 0.277843), (0.185812, 0.23733), (0.148996, 0.189171), (0.100007, 0.125816), (0.0003127876261714846, 0.014511194080114365)]
-convL2bins =  [(0.995298, 1.3643), (0.861066, 1.16279), (0.426857, 0.547827), (0.262645, 0.330186), (0.218984, 0.273731), (0.187878, 0.233872), (0.150619, 0.186512), (0.10106, 0.124477), (0.00035427528200671077, 0.020199092105031013)]
-
-biasL1bins = [(0.3510325849056244, 0.49078235030174255), (0.30895063281059265, 0.4311973750591278), (0.16023841500282288, 0.22283604741096497), (0.099583700299263, 0.1381179839372635), (0.08340170979499817, 0.11503150314092636), (0.07280077040195465, 0.09948030859231949), (0.05857400223612785, 0.07965542376041412), (0.04044099152088165, 0.054193537682294846), (0.0, 0.0)]
-biasL2bins = [(0.4154910147190094, 0.5820578932762146), (0.3656001389026642, 0.5121639370918274), (0.18930286169052124, 0.2637346684932709), (0.11687946319580078, 0.16306844353675842), (0.09796475619077682, 0.13558265566825867), (0.0848352462053299, 0.11619425565004349), (0.06783176958560944, 0.09277229756116867), (0.046059850603342056, 0.062238890677690506), (0.0, 0.0)]
-
-gemmL1bins=  [(0.711203, 0.772211), (0.625894, 0.679601), (0.322665, 0.350383), (0.199646, 0.216727), (0.166556, 0.180781), (0.142945, 0.155132), (0.114662, 0.124399), (0.0771065, 0.0835984), (0.00034660729579627514, 0.008546584285795689)]
-gemmL2bins=  [(0.715208, 0.768102), (0.629411, 0.675947), (0.324433, 0.348358), (0.200659, 0.21539), (0.167381, 0.179634), (0.143637, 0.154119), (0.115197, 0.123548), (0.0774642, 0.0829647), (0.0003496285935398191, 0.009841435588896275)]
-
-
-
-def findBinByOp(op):
-    if op == 'tensorConv':
-        return convL1bins, convL2bins
-    if op == 'tensorAdd':
-        return biasL1bins, biasL2bins
-    if op == 'tensorGemm':
-        return gemmL1bins, gemmL2bins
-
-    return None, None
-
-
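-# Maps an observed error Lx onto the descending (minT, maxT) threshold bins
-# above: returns the index of the first bin whose lower bound Lx exceeds,
-# 9 when Lx falls below every threshold, and 0 when the op has no bins.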
-def getSwing(Lx, opLxbin):
-    if opLxbin is None:
-        return 0
-    for i, (minT, maxT) in enumerate(opLxbin):
-        if Lx > minT:
-            return i
-
-    return 9
-
-
-
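-# Builds a configuration from per-op L1/L2 errors: each error is mapped to a
-# swing level, levels below 7 are bumped by one (apparently as a conservative
-# margin), and the larger of the two becomes the op's final knob.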
-def getConfiguration(L_thresholds):
-    configuration = []
-    for l in L_thresholds:
-        # L0 is op_type
-        opL1bin, opL2bin = findBinByOp(l[0])
-        # NOTE: L2 is L1 error, L3 is L2 error
-        sL1 = getSwing(l[2], opL1bin)
-        sL2 = getSwing(l[3], opL2bin)
-        if sL1 < 7:
-            sL1 = sL1 + 1
-        if sL2 < 7:
-            sL2 = sL2 + 1
-        configuration.append((l[0], l[1], l[2], l[3], sL1, sL2, max(sL1, sL2)))
-
-    return configuration
-
-
-def displayConfig(config):
-    for c in config:
-        print(c)
-
-def displayMultipleConfigurations(configurations):
-    for f, c in configurations.items():
-        print(f)
-        displayConfig(c)
-        print()
-
-def getConfigFromFile(filename):
-    L_requirements = readDataFromText(filename)
-    config = getConfiguration(L_requirements)
-    return config
-
-
-def getConfigurationsFromDir(dirname):
-    configurations = dict()
-    for f in os.listdir(dirname):
-        configurations[f] = getConfigFromFile(os.path.join(dirname, f))
-
-    return configurations
-              
-
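-# Collapses per-op swings into one target per layer: each conv/gemm op starts
-# a layer, and the layer's target is the max swing over that op and the
-# non-layer ops that follow it.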
-def getLayerWiseTarget(config):
-    target = []
-    for i, op in enumerate(config):
-        if (op[0] == 'tensorGemm') or (op[0] == 'tensorConv'):
-            t = op[6]
-            for j in range(i+1, len(config)):
-                if config[j][0] == 'tensorGemm' or config[j][0] == 'tensorConv':
-                    break
-                t = max(t, config[j][6])
-            target.append(t)
-            t = 0
-
-    return target
-
-
-def dumpLayerWiseTarget(file, targets):
-    with open(file, "w") as f:
-        for name, t in targets.items():
-            f.write(name)
-            f.write(" ")
-            for i in t:
-                f.write(str(i))
-                f.write(" ")
-            f.write("\n")
-
-
-def getTargetsFromConfigurations(configs):
-    targets = dict()
-    for f, c in configs.items():
-        targets[f] = [d[6] for d in c]
-
-    return targets
-                
-
-def dumpBenchmarkTargets(name, benchmark_dir):
-    benchmark_targets = dict()
-    error = ['linear', 'log', 'quad']
-    for e in error:
-        results_dir = os.path.join(benchmark_dir, e)
-        configs = getConfigurationsFromDir(results_dir)
-        benchmark_targets[e] = getTargetsFromConfigurations(configs)
-
-    return benchmark_targets
-
-
-
-def dumpTargets(filename, targets):
-    with open(filename, "w") as f:
-        for e, file_configs in targets.items():
-            for name, config in file_configs.items():
-                for c in config:
-                    f.write(str(c))
-                    f.write(" ")
-                f.write("\n")
-
-
-                
-def getLayerSwings(layer_desc, configurations):
-
-    layer_swings = []
-    for i in range(len(configurations)):
-      config_vals = configurations[i]
-      if len(config_vals) == 0:
-        continue
-      
-      layer_index = 0
-      index = 0
-      swing_vals = []
-                   
-      while layer_index < len(layer_desc):
-        if len(layer_desc[layer_index]) == 1:
-          promise_swing = config_vals[index]
-          layer_type = layer_desc[layer_index][0]
-          layer_type = layer_type.strip()
-          print ("****layer_type = ", layer_type)
-          if layer_type != "conv" and layer_type != "dense":
-            promise_swing = -9
-          if layer_type == "depthwise_conv":
-            promise_swing = 9  
-          index += 1
-        else:
-          #print ("index = ", index)
-          # FIXIT: Doesn't look right
-          print (config_vals[index], config_vals[index+1])
-          promise_swing = max(config_vals[index], config_vals[index+1])                  
-          stride = len(layer_desc[layer_index])
-          index += stride
-          
-        swing_vals.append(promise_swing)
-        layer_index += 1  
-        
-      layer_swings.append(swing_vals)
-
-    return layer_swings
-
-                   
-                
-
-def loadLayerDesc(layer_desc_file):
-
-    layer_desc = []
-    f = open(layer_desc_file)
-    for x in f:
-      vals = x.split()
-      layer_desc.append(vals)
-    f.close()
-
-    return layer_desc
-      
-
-
-def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
-
-    layer_desc = loadLayerDesc(layer_desc_file)
-    print (layer_desc)
-
-    file_names = []
-    configurations = []
-    for e, file_configs in targets.items():
-      for name, config in file_configs.items():
-        config_vals = []  
-        for c in config:
-          config_vals.append(c)         
-        print (config_vals)
-
-        configurations.append(config_vals)
-
-        rank = e + "_" +  "_".join(name.split("_")[-2:])
-        file_names.append(rank)
-        
-        
-    # NOTE: get PROMISE swing values corresponding to each layer
-    layer_swings = getLayerSwings(layer_desc, configurations)
-
-    targets_file_path = tuned_result_dir + "/layer_targets.txt"
-    f = open(targets_file_path, "w+")
-
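-    # Swing 8 or 9 is expanded to one value per op in the layer; the sentinel
-    # -9 (assigned to non-conv/dense layers) is emitted as "8"; any other
-    # swing is written as a single value.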
-    for config in layer_swings:
-      index = 0
-      for swing in config:
-        swing_str = ""
-        if swing == 8 or swing == 9:
-          layer_size = len(layer_desc[index])
-          for i in range(layer_size):
-            swing_str += str(swing)
-            if i < layer_size - 1:
-              swing_str += " "
-        elif swing == -9:
-          swing_str += "8"                   
-        else:
-          swing_str += str(swing)
-
-        if index < len(config) - 1:
-          swing_str += ","    
-          
-        f.write(swing_str)
-        index += 1
-        
-      f.write("\n")
-        
-    f.close()
-    
-    print(layer_swings)    
-    return layer_swings, file_names
-
-
-
-def replaceFirstLayer(layer_swings):
-
-  # Ensuring first conv on GPU
-  for conf in layer_swings:
-    conf[0] = 9
-    
-    
-    
-def computeLayerTargets(tuned_result_dir, layer_desc_file):
-
-    targets_file_path = tuned_result_dir + "/tensor_targets.txt"
-    targets = dumpBenchmarkTargets(targets_file_path, tuned_result_dir)
-
-    dumpTargets(targets_file_path, targets)
-    
-    layer_swings, file_names = dumpLayerTargets(targets, tuned_result_dir, layer_desc_file)
-
-    replaceFirstLayer(layer_swings)
-    
-    return layer_swings, file_names
-    
-
-# Externally-called function    
-def compute_swing_selection(tuned_result_dir, layer_file):
-   
-    return computeLayerTargets(tuned_result_dir, layer_file)
-
-                            
-        
-                
-if __name__ == "__main__":
-
-    # vgg16 inputs kept for reference; overridden by the resnet18 pair below
-    #tuned_result_dir = "./vgg16_cifar10_tuner_1/high_confidence/"
-    #layer_file = "layer_composition.txt"
-
-    tuned_result_dir = "./resnet18_cifar10_tuner_1/high_confidence/"
-    layer_file = "layer_composition2.txt"
-    computeLayerTargets(tuned_result_dir, layer_file)
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tensor_inline.sh b/llvm/projects/hpvm-tensor-rt/bin/tensor_inline.sh
deleted file mode 100755
index f67f22ebad5352d99238addd26d9e1b568ee2125..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tensor_inline.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-clang-4.0 -emit-llvm tensor_cpu_runtime.cc -S -o tensor_cpu_runtime.ll
-opt-4.0 -always-inline tensor_cpu_runtime.ll -S -o tensor_cpu_runtime.ll
diff --git a/llvm/projects/hpvm-tensor-rt/bin/times.py b/llvm/projects/hpvm-tensor-rt/bin/times.py
deleted file mode 100644
index 082b0d91acb19e70a6c217b25f8747f3197b45b7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/times.py
+++ /dev/null
@@ -1,78 +0,0 @@
-
-
-
-class Config:
-  def __init__(self):
-    self.runtime = 0
-    self.fed_runs = 0
-    self.full_runs = 0
-    
-
-def computeTimes(bench):
-
-  conf_runs = 60
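-  # Cost model (bench.runtime is apparently seconds per run, given the
-  # conversion to hours below): fed_time = 100 tuning runs + conf_runs
-  # confirmation runs for each of fed_runs configs; full_time = 1000 tuning
-  # runs + conf_runs runs for each of full_runs configs.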
-  fed_time = (bench.runtime * 100) + (bench.fed_runs * conf_runs * bench.runtime)
-  fed_time_hrs = fed_time / (60*60)
-  
-  full_time = (bench.runtime * 1000) + (bench.full_runs * conf_runs * bench.runtime)
-  full_time_hrs = full_time / (60*60)
-    
-  print ("fedtime_hrs = ", fed_time_hrs, " full_time_hrs = ", full_time_hrs, "\n")
-  
-  
-
-if __name__ == "__main__":
-    
-
-  resnet = Config()
-  resnet.runtime = 8
-  resnet.fed_runs = 3
-  resnet.full_runs = 5
-
-  computeTimes(resnet)
-
-  alexnet = Config()
-  alexnet.runtime = 7.8
-  alexnet.fed_runs = 47
-  alexnet.full_runs = 274
-
-  computeTimes(alexnet)
-
-  alexnet2 = Config()
-  alexnet2.runtime = 2.3
-  alexnet2.fed_runs = 62
-  alexnet2.full_runs = 339
-
-  computeTimes(alexnet2)
-
-  vgg1 = Config()
-  vgg1.runtime = 7.4
-  vgg1.fed_runs = 15
-  vgg1.full_runs = 211
-
-  computeTimes(vgg1)
-  
-
-  vgg2 = Config()
-  vgg2.runtime = 15.4
-  vgg2.fed_runs = 8
-  vgg2.full_runs = 150
-
-  computeTimes(vgg2)
-  
-  
-  lenet = Config()
-  lenet.runtime = 0.98
-  lenet.fed_runs = 64
-  lenet.full_runs = 228
-
-  computeTimes(lenet)
-  
-  
-  mobilenet = Config()
-  mobilenet.runtime = 11
-  mobilenet.fed_runs = 32
-  mobilenet.full_runs = 267
-
-  computeTimes(mobilenet)
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/benchmarks.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/benchmarks.py
deleted file mode 100644
index fada8ddf641068f4622f48cbe816c5a6f9ff7843..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/benchmarks.py
+++ /dev/null
@@ -1,483 +0,0 @@
-
-# NOTES: Batch13 (Baseline) Batch14 - With ErrorSens (10, 25, 35)
-
-# Batch 9: No Error Sens. Min : P3
-# Batch 10: No Error Sens + More Runs for Loss1 and Loss2.  Min: P3
-# Batch 11: Error Sens: Skipping 30% elems in each : Min: P3. More runs in Loss1 (4000) and Loss2 (2000)
-# Batch 12: Error Sens: 10, 25, 35, for Loss1, 2, 3, respectively, Min: P3. 1000 Runs for All
-# Batch 13: No Error Sens: Equal Runs (1000) for all. Min: P1
-# Batch 14: Rerunning Batch12 with bugFix!
-# Batch 15: MAJOR CHANGE: 3 different skip levels for each Loss1,Loss2,Loss3
-
-# Batch 18: Batch13 (Baseline) + ParetoCurve (1500 Runs) - BUGGY IGNORE!!!
-
-# Batch 19: (Baseline) + ParetoCurve + 2 runs in Tuning Phase (1500 Runs)
-
-# Batch 20: 3-Skip levels + 2 runs + 1500 Runs + EnergyBandSize now % of Max (Compare against Batch19)
-
-
-# Batch 200: AlgoTuner - 1000 images - 1500 runs (IGNORE)
-# Batch 201: AlgoTuner - 2000 images - 1500 runs
-# Batch 202: AlgoTuner - 2000 images - 500 runs
-# Batch 203: AlgoTuner - 2000 images - 3000 runs
-
-
-#---- CHANGES: i) Reshuffled inputs ii) 3K images for tuning
-# Batch 210: 3K images, 1000 runs (1500 resnet), no FP32 used in tuning
-# Batch 211: Same as Batch-210 + uses tensorConvPerfCuda*Half*
-
-
-#batch_id = "batch210"
-#batch_id = "batch211"
-batch_id = "batch210"
-
-
-class Benchmark:
-  def __init__(self):
-    self.tuner_binary = ""
-    self.promise_binary = ""
-    self.tuner_accuracy = 0
-    self.promise_accuracy = 0
-    self.num_flags = 0
-    self.num_layers = 0
-    self.autotuner_runs = 0
-    self.error_range_1 = 0
-    self.error_range_2 = 0
-    self.result_dir_1 = ""
-    self.result_dir_2 = ""
-    self.promise_result_dir_1 = ""
-    self.promise_result_dir_2 = ""
-
-    
-
-bench_tuner_data = {}
-
-# FIXIT: Fix Variable Names below
-Alexnet1 = Benchmark()
-Alexnet1.tuner_binary = "alexnet_cifar10_tuner"
-Alexnet1.promise_binary = "alexnet_promise"
-Alexnet1.validation_binary = "alexnet_valid"
-Alexnet1.num_flags = 21
-Alexnet1.num_layers = 6
-Alexnet1.error_range_1 = 10
-Alexnet1.error_range_2 = 13
-Alexnet1.start_promise_range = 1
-Alexnet1.skip_layers = 0
-#Alexnet1.skip_layer_str = "0"
-Alexnet1.skip_layer_str = "5_0"
-
-Alexnet1.base_dir = "../build_tuner/tuner_results/alexnet_cifar10/"
-Alexnet1.result_dir_1 = "../build_tuner/tuner_results/alexnet_cifar10/loss_1/" + batch_id
-Alexnet1.result_dir_2 = "../build_tuner/tuner_results/alexnet_cifar10/loss_2/" + batch_id
-Alexnet1.result_dir_3 = "../build_tuner/tuner_results/alexnet_cifar10/loss_3/" + batch_id
-
-Alexnet1.tensor_desc_file = "tuner_results/alexnet_cifar10/alexnet_tensors.txt"
-Alexnet1.layer_file = "tuner_results/alexnet_cifar10/alexnet_layers.txt"
-Alexnet1.cost_file = "../build_tuner/tuner_results/alexnet_cifar10/op_cost.txt"
-Alexnet1.layer_knobs = "../opentuner/data/alexnet/knobs.txt"
-
-#Alexnet1.loss1_result_file = "tuner_results/alexnet2_cifar10/alexnet_layers.txt"
-Alexnet1.loss1_result_file = "tuner_results/alexnet_cifar10/loss_1/promise_tuned_confs/promise_confs.txt"
-Alexnet1.loss2_result_file = "tuner_results/alexnet_cifar10/loss_2/promise_tuned_confs/promise_confs.txt"
-
-Alexnet1.autotuner_runs = 1000
-Alexnet1.tuner_accuracy = 79.9
-#Alexnet1.promise_accuracy = 79.9
-Alexnet1.promise_accuracy = 78.86
-Alexnet1.validation_accuracy = 79.19
-
-bench_tuner_data["alexnet_cifar10"] = Alexnet1
-
-
-Alexnet2 = Benchmark()
-Alexnet2.tuner_binary = "alexnet2_cifar10_tuner"
-Alexnet2.promise_binary = "alexnet2_promise"
-Alexnet2.validation_binary = "alexnet2_valid"
-Alexnet2.num_flags = 23
-Alexnet2.num_layers = 7
-Alexnet2.error_range_1 = 10
-Alexnet2.error_range_2 = 13
-Alexnet2.start_promise_range = 1
-#Alexnet2.skip_layer_str = "0"
-Alexnet2.skip_layer_str = "6_1_0"
-
-Alexnet2.base_dir = "../build_tuner/tuner_results/alexnet2_cifar10/"
-Alexnet2.result_dir_1 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_1/" + batch_id
-Alexnet2.result_dir_2 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_2/" + batch_id
-Alexnet2.result_dir_3 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_3/" + batch_id
-Alexnet2.tensor_desc_file = "tuner_results/alexnet2_cifar10/alexnet2_tensors.txt"
-Alexnet2.layer_file = "tuner_results/alexnet2_cifar10/alexnet2_layers.txt"
-Alexnet2.cost_file = "../build_tuner/tuner_results/alexnet2_cifar10/op_cost.txt"
-Alexnet2.layer_knobs = "../opentuner/data/alexnet2/knobs.txt"
-#Alexnet2.loss1_result_file = "tuner_results/alexnet2_cifar10/loss_1/promise_tuned_confs/promise_confs.txt"
-#Alexnet2.loss2_result_file = "tuner_results/alexnet2_cifar10/loss_2/promise_tuned_confs/promise_confs.txt"
-Alexnet2.autotuner_runs = 1000
-Alexnet2.tuner_accuracy = 84.19
-#Alexnet2.promise_accuracy = 84.19
-Alexnet2.promise_accuracy = 84.7
-Alexnet2.validation_accuracy = 85.15
-
-bench_tuner_data["alexnet2_cifar10"] = Alexnet2
-
-
-
-Alexnet3 = Benchmark()
-Alexnet3.tuner_binary = "vgg16_cifar10_tuner"
-Alexnet3.promise_binary = "./vgg16_cifar10_promise"
-Alexnet3.validation_binary = "vgg16_cifar10_valid"
-Alexnet3.num_flags = 50
-Alexnet3.num_layers = 15
-Alexnet3.error_range_1 = 9
-Alexnet3.error_range_2 = 11
-Alexnet3.start_promise_range = 1
-#Alexnet3.skip_layer_str = "0"
-Alexnet3.skip_layer_str = "14_3_4_1_6"
-
-Alexnet3.base_dir = "../build_tuner/tuner_results/vgg16_cifar10/"
-Alexnet3.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar10/loss_1/" + batch_id
-Alexnet3.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar10/loss_2/" + batch_id
-Alexnet3.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar10/loss_3/" + batch_id
-
-Alexnet3.tensor_desc_file = "tuner_results/vgg16_cifar10/vgg16_tensors.txt"
-Alexnet3.layer_file = "tuner_results/vgg16_cifar10/vgg16_layers.txt"
-Alexnet3.cost_file = "../build_tuner/tuner_results/vgg16_cifar10/op_cost.txt"
-Alexnet3.layer_knobs = "../opentuner/data/vgg16_cifar10/knobs.txt"
-
-Alexnet3.loss1_result_file = "tuner_results/vgg16_cifar10/loss_1/promise_tuned_confs/promise_confs.txt"
-Alexnet3.loss2_result_file = "tuner_results/vgg16_cifar10/loss_2/promise_tuned_confs/promise_confs.txt"
-
-Alexnet3.autotuner_runs = 1000
-Alexnet3.tuner_accuracy = 90.19
-#Alexnet3.promise_accuracy = 90.19
-Alexnet3.promise_accuracy = 88.53
-Alexnet3.validation_accuracy = 89.05
-
-bench_tuner_data["vgg16_cifar10"] = Alexnet3
-
-
-
-Alexnet4 = Benchmark()
-Alexnet4.tuner_binary = "resnet18_cifar10_tuner"
-Alexnet4.promise_binary = "resnet18_promise"
-Alexnet4.validation_binary = "resnet18_valid"
-Alexnet4.num_flags = 73
-Alexnet4.num_layers = 22
-Alexnet4.error_range_1 = 7
-Alexnet4.error_range_2 = 9
-Alexnet4.start_promise_range = 1
-#Alexnet4.skip_layer_str = "0"
-Alexnet4.skip_layer_str = "0_1_2_14_15_17_18_21"
-Alexnet4.base_dir = "../build_tuner/tuner_results/resnet18_cifar10/"
-Alexnet4.result_dir_1 = "../build_tuner/tuner_results/resnet18_cifar10/loss_1/" + batch_id
-Alexnet4.result_dir_2 = "../build_tuner/tuner_results/resnet18_cifar10/loss_2/" + batch_id
-Alexnet4.result_dir_3 = "../build_tuner/tuner_results/resnet18_cifar10/loss_3/" + batch_id
-Alexnet4.tensor_desc_file = "tuner_results/resnet18_cifar10/resnet_tensors.txt"
-Alexnet4.layer_file = "tuner_results/resnet18_cifar10/resnet_layers.txt"
-Alexnet4.cost_file = "../build_tuner/tuner_results/resnet18_cifar10/op_cost.txt"
-Alexnet4.layer_knobs = "../opentuner/data/resnet/knobs.txt"
-
-Alexnet4.loss1_result_file = "tuner_results/resnet18_cifar10/loss_1/promise_tuned_confs/promise_confs.txt"
-Alexnet4.loss2_result_file = "tuner_results/resnet18_cifar10/loss_2/promise_tuned_confs/promise_confs.txt"
-
-Alexnet4.autotuner_runs = 1500
-Alexnet4.tuner_accuracy = 89.6
-#Alexnet4.promise_accuracy = 89.59  - 1000 images
-Alexnet4.promise_accuracy = 89.5
-Alexnet4.validation_accuracy = 89.65
-
-bench_tuner_data["resnet18_cifar10"] = Alexnet4
-
-
-
-
-
-Alexnet5 = Benchmark()
-Alexnet5.tuner_binary = "vgg16_cifar100_tuner"
-Alexnet5.promise_binary = "vgg16_cifar100_promise"
-Alexnet5.validation_binary = "vgg16_cifar100_valid"
-Alexnet5.num_flags = 50
-Alexnet5.num_layers = 15
-Alexnet5.error_range_1 = 9
-Alexnet5.error_range_2 = 11
-Alexnet5.start_promise_range = 1
-#Alexnet5.skip_layer_str = "0"
-Alexnet5.skip_layer_str = "0_1_2_3_4"
-Alexnet5.base_dir = "../build_tuner/tuner_results/vgg16_cifar100/"
-Alexnet5.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar100/loss_1/" + batch_id
-Alexnet5.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar100/loss_2/" + batch_id
-Alexnet5.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar100/loss_3/" + batch_id
-
-Alexnet5.tensor_desc_file = "../build_tuner/tuner_results/vgg16_cifar100/vgg16_tensors.txt"
-Alexnet5.layer_file = "../build_tuner/tuner_results/vgg16_cifar100/vgg16_layers.txt"
-Alexnet5.cost_file = "../build_tuner/tuner_results/vgg16_cifar100/op_cost.txt"
-Alexnet5.layer_knobs = "../opentuner/data/vgg16_cifar100/knobs.txt"
-
-Alexnet5.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-Alexnet5.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Alexnet5.autotuner_runs = 1000
-Alexnet5.tuner_accuracy = 67.95
-#Alexnet5.promise_accuracy = 66.8
-Alexnet5.promise_accuracy = 67.86
-Alexnet5.validation_accuracy = 68.65
-
-bench_tuner_data["vgg16_cifar100"] = Alexnet5
-
-
-
-Alexnet6 = Benchmark()
-Alexnet6.tuner_binary = "lenet_keras"
-Alexnet6.promise_binary = "lenet_promise"
-Alexnet6.validation_binary = "lenet_promise"
-#Alexnet6.validation_binary = "lenet_keras_valid"
-Alexnet6.num_flags = 14
-Alexnet6.num_layers = 4
-Alexnet6.error_range_1 = 16
-Alexnet6.error_range_2 = 20
-Alexnet6.start_promise_range = 1
-Alexnet6.skip_layer_str = "0"
-
-Alexnet6.base_dir = "../build_tuner/tuner_results/lenet_keras/"
-Alexnet6.result_dir_1 = "../build_tuner/tuner_results/lenet_keras/loss_1/" + batch_id
-Alexnet6.result_dir_2 = "../build_tuner/tuner_results/lenet_keras/loss_2/" + batch_id
-Alexnet6.result_dir_3 = "../build_tuner/tuner_results/lenet_keras/loss_3/" + batch_id
-
-Alexnet6.tensor_desc_file = "tuner_results/lenet_keras/lenet_tensors.txt"
-Alexnet6.layer_file = "tuner_results/lenet_keras/lenet_layers.txt"
-Alexnet6.cost_file = "../build_tuner/tuner_results/lenet_keras/op_cost.txt"
-Alexnet6.layer_knobs = "../opentuner/data/lenet/knobs.txt"
-
-#Alexnet6.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Alexnet6.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Alexnet6.autotuner_runs = 1000
-Alexnet6.tuner_accuracy = 98.9
-Alexnet6.promise_accuracy = 99.7
-Alexnet6.validation_accuracy = 99
-
-bench_tuner_data["lenet_keras"] = Alexnet6
-
-
-
-
-Alexnet7 = Benchmark()
-Alexnet7.tuner_binary = "mobilenet_cifar10"
-#Alexnet7.promise_binary = "mobilenet_quant"
-Alexnet7.promise_binary = "mobilenet_promise"
-Alexnet7.validation_binary = "mobilenet_valid"
-Alexnet7.num_flags = 85
-Alexnet7.num_layers = 15
-Alexnet7.error_range_1 = 7
-Alexnet7.error_range_2 = 8
-Alexnet7.start_promise_range = 1
-#Alexnet7.skip_layer_str = "0"
-Alexnet7.skip_layer_str = "1_14_0_6_2"
-Alexnet7.base_dir = "../build_tuner/tuner_results/mobilenet/"
-Alexnet7.result_dir_1 = "../build_tuner/tuner_results/mobilenet/loss_1/" + batch_id
-Alexnet7.result_dir_2 = "../build_tuner/tuner_results/mobilenet/loss_2/" + batch_id
-Alexnet7.result_dir_3 = "../build_tuner/tuner_results/mobilenet/loss_3/" + batch_id
-
-Alexnet7.tensor_desc_file = "tuner_results/mobilenet/mobilenet_ops.txt"
-Alexnet7.layer_file = "tuner_results/mobilenet/mobilenet_layer_comp.txt"
-Alexnet7.cost_file = "../build_tuner/tuner_results/mobilenet/op_cost.txt"
-Alexnet7.layer_knobs = "../opentuner/data/mobilenet/knobs.txt"
-
-#--- Files below needed for VALIDATION experiment
-Alexnet7.loss1_result_file = "tuner_results/mobilenet/loss_1/batch1/promise_tuner/high_confidence/promise_confs.txt"
-Alexnet7.loss2_result_file = "tuner_results/mobilenet/loss_2/batch1/promise_tuner/high_confidence/promise_confs.txt"
-Alexnet7.autotuner_runs = 1000
-Alexnet7.tuner_accuracy = 84.8
-#Alexnet7.promise_accuracy = 84.8
-Alexnet7.promise_accuracy = 83.73
-Alexnet7.validation_accuracy = 84.4
-
-bench_tuner_data["mobilenet_cifar10"] = Alexnet7
-
-
-
-Alexnet8 = Benchmark()
-Alexnet8.tuner_binary = "mobilenet_cifar10_shallow"
-Alexnet8.promise_binary = "mobilenet_shallow_promise"
-Alexnet8.validation_binary = "mobilenet_shallow_valid"
-Alexnet8.num_flags = 42
-Alexnet8.num_layers = 8
-Alexnet8.error_range_1 = 10
-Alexnet8.error_range_2 = 12
-Alexnet8.start_promise_range = 1
-#Alexnet8.skip_layer_str = "0"
-Alexnet8.skip_layer_str = "7_0_1"
-Alexnet8.base_dir = "../build_tuner/tuner_results/mobilenet_shallow/"
-Alexnet8.result_dir_1 = "../build_tuner/tuner_results/mobilenet_shallow/loss_1/" + batch_id
-Alexnet8.result_dir_2 = "../build_tuner/tuner_results/mobilenet_shallow/loss_2/" + batch_id
-Alexnet8.result_dir_3 = "../build_tuner/tuner_results/mobilenet_shallow/loss_3/" + batch_id
-
-Alexnet8.tensor_desc_file = "../build_tuner/tuner_results/mobilenet_shallow/mobilenet_shallow_ops.txt"
-Alexnet8.layer_file = "../build_tuner/tuner_results/mobilenet_shallow/mobilenet_shallow_layer_comp.txt"
-Alexnet8.cost_file = "../build_tuner/tuner_results/mobilenet_shallow/op_cost.txt"
-Alexnet8.layer_knobs = "../opentuner/data/mobilenet_shallow/knobs.txt"
-
-Alexnet8.loss1_result_file = "../build_tuner/tuner_results/mobilenet_shallow/loss_1/batch2/promise_tuner/high_confidence/promise_selected_confs.txt"
-Alexnet8.loss2_result_file = "../build_tuner/tuner_results/mobilenet_shallow/loss_2/batch2/promise_tuner/high_confidence/promise_selected_confs.txt"
-
-Alexnet8.autotuner_runs = 1000
-Alexnet8.tuner_accuracy = 87.6
-#Alexnet8.promise_accuracy = 87.59
-Alexnet8.promise_accuracy = 87.76
-Alexnet8.validation_accuracy = 88.5
-
-bench_tuner_data["mobilenet_shallow"] = Alexnet8
-
-
-
-"""
-Alexnet9 = Benchmark()
-Alexnet9.tuner_binary = "fc4_clipped"
-Alexnet9.promise_binary = ""
-Alexnet9.validation_binary = ""
-Alexnet9.num_flags = 12
-Alexnet9.num_layers = 4
-Alexnet9.error_range_1 = 12
-Alexnet9.error_range_2 = 16 
-Alexnet9.start_promise_range = 3
-Alexnet9.skip_layer_str = "0"
-Alexnet9.base_dir = "../build_tuner/tuner_results/fc4/"
-Alexnet9.result_dir_1 = "../build_tuner/tuner_results/fc4/loss1/batch1"
-Alexnet9.result_dir_2 = "../build_tuner/tuner_results/fc4/loss2/batch1"
-Alexnet9.tensor_desc_file = ""
-Alexnet9.layer_file = ""
-
-Alexnet9.loss1_result_file = ""
-Alexnet9.loss2_result_file = ""
-
-Alexnet9.autotuner_runs = 1000
-Alexnet9.tuner_accuracy = 93.8
-Alexnet9.promise_accuracy = 0.0
-Alexnet9.validation_accuracy = 0.0
-
-bench_tuner_data["fc4"] = Alexnet9
-
-
-
-
-Pipeline1 = Benchmark()
-Pipeline1.tuner_binary = "pipeline_GEOM"
-Pipeline1.promise_binary = "pipeline_GEOM_promise"
-Pipeline1.validation_binary = "pipeline_GEOM_valid"
-Pipeline1.num_flags = 9
-Pipeline1.num_layers = 4
-Pipeline1.error_range_1 = 10
-Pipeline1.error_range_2 = 15
-Pipeline1.start_promise_range = 2
-Pipeline1.skip_layer_str = "1_2"
-Pipeline1.result_dir_1 = "tuner_results/pipeline_GEOM/loss_30/batch1"
-Pipeline1.result_dir_2 = "tuner_results/pipeline_GEOM/loss_20/batch1"
-Pipeline1.tensor_desc_file = "tuner_results/pipeline_GEOM/pipeline_GEOM_tensors.txt"
-Pipeline1.layer_file = "tuner_results/pipeline_GEOM/pipeline_GEOM_layers.txt"
-#Pipeline1.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Pipeline1.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Pipeline1.autotuner_runs = 300
-Pipeline1.tuner_accuracy = 95
-Pipeline1.promise_accuracy = 95
-Pipeline1.validation_accuracy = 95
-
-bench_tuner_data["pipeline_GEOM"] = Pipeline1
-
-
-Pipeline2 = Benchmark()
-Pipeline2.tuner_binary = "pipeline_GEMO"
-Pipeline2.promise_binary = "pipeline_GEMO_promise"
-Pipeline2.validation_binary = "pipeline_GEMO_valid"
-Pipeline2.num_flags = 9
-Pipeline2.num_layers = 4
-Pipeline2.error_range_1 = 10
-Pipeline2.error_range_2 = 15
-Pipeline2.start_promise_range = 2
-Pipeline2.skip_layer_str = "1_3"
-Pipeline2.result_dir_1 = "tuner_results/pipeline_GEMO/loss_30/batch1"
-Pipeline2.result_dir_2 = "tuner_results/pipeline_GEMO/loss_20/batch1"
-Pipeline2.tensor_desc_file = "tuner_results/pipeline_GEMO/pipeline_GEMO_tensors.txt"
-Pipeline2.layer_file = "tuner_results/pipeline_GEMO/pipeline_GEMO_layers.txt"
-#Pipeline1.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Pipeline1.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Pipeline2.autotuner_runs = 300
-Pipeline2.tuner_accuracy = 95
-Pipeline2.promise_accuracy = 95
-Pipeline2.validation_accuracy = 95
-
-bench_tuner_data["pipeline_GEMO"] = Pipeline2
-
-
-
-
-Pipeline3 = Benchmark()
-Pipeline3.tuner_binary = "pipeline_GSME"
-Pipeline3.promise_binary = "pipeline_GSME_promise"
-Pipeline3.validation_binary = "pipeline_GSME_valid"
-Pipeline3.num_flags = 9
-Pipeline3.num_layers = 4
-Pipeline3.error_range_1 = 10
-Pipeline3.error_range_2 = 15
-Pipeline3.start_promise_range = 2
-Pipeline3.skip_layer_str = "1_3"
-Pipeline3.result_dir_1 = "tuner_results/pipeline_GSME/loss_30/batch1"
-Pipeline3.result_dir_2 = "tuner_results/pipeline_GSME/loss_20/batch1"
-Pipeline3.tensor_desc_file = "tuner_results/pipeline_GSME/pipeline_GSME_tensors.txt"
-Pipeline3.layer_file = "tuner_results/pipeline_GSME/pipeline_GSME_layers.txt"
-#Pipeline1.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Pipeline1.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Pipeline3.autotuner_runs = 300
-Pipeline3.tuner_accuracy = 95
-Pipeline3.promise_accuracy = 95
-Pipeline3.validation_accuracy = 95
-
-bench_tuner_data["pipeline_GSME"] = Pipeline3
-
-
-Pipeline4 = Benchmark()
-Pipeline4.tuner_binary = "pipeline_GEO"
-Pipeline4.promise_binary = "pipeline_GEO_promise"
-Pipeline4.validation_binary = "pipeline_GEO_valid"
-Pipeline4.num_flags = 7
-Pipeline4.num_layers = 3
-Pipeline4.error_range_1 = 10
-Pipeline4.error_range_2 = 15
-Pipeline4.start_promise_range = 2
-Pipeline4.skip_layer_str = "1_2"
-Pipeline4.result_dir_1 = "tuner_results/pipeline_GEO/loss_30/batch1"
-Pipeline4.result_dir_2 = "tuner_results/pipeline_GEO/loss_20/batch1"
-Pipeline4.tensor_desc_file = "tuner_results/pipeline_GEO/pipeline_GEO_tensors.txt"
-Pipeline4.layer_file = "tuner_results/pipeline_GEO/pipeline_GEO_layers.txt"
-#Pipeline1.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Pipeline1.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Pipeline4.autotuner_runs = 300
-Pipeline4.tuner_accuracy = 95
-Pipeline4.promise_accuracy = 95
-Pipeline4.validation_accuracy = 95
-
-bench_tuner_data["pipeline_GEO"] = Pipeline4
-
-
-Pipeline5 = Benchmark()
-Pipeline5.tuner_binary = "pipeline_GSM"
-Pipeline5.promise_binary = "pipeline_GSM_promise"
-Pipeline5.validation_binary = "pipeline_GSM_valid"
-Pipeline5.num_flags = 6
-Pipeline5.num_layers = 3
-Pipeline5.error_range_1 = 10
-Pipeline5.error_range_2 = 15
-Pipeline5.start_promise_range = 2
-Pipeline5.skip_layer_str = "1_1"
-Pipeline5.result_dir_1 = "tuner_results/pipeline_GSM/loss_30/batch1"
-Pipeline5.result_dir_2 = "tuner_results/pipeline_GSM/loss_20/batch1"
-Pipeline5.tensor_desc_file = "tuner_results/pipeline_GSM/pipeline_GSM_tensors.txt"
-Pipeline5.layer_file = "tuner_results/pipeline_GSM/pipeline_GSM_layers.txt"
-#Pipeline1.loss1_result_file = "tuner_results/vgg_cifar100/loss_1/promise_tuned_confs/promise_confs.txt"
-#Pipeline1.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_confs/promise_confs.txt"
-Pipeline5.autotuner_runs = 300
-Pipeline5.tuner_accuracy = 95
-Pipeline5.promise_accuracy = 95
-Pipeline5.validation_accuracy = 95
-
-bench_tuner_data["pipeline_GSM"] = Pipeline5
-
-"""
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/buildRtConfig.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/buildRtConfig.py
deleted file mode 100644
index 81115b0587b36b68e6acfb92ab14d7424ee117f5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/buildRtConfig.py
+++ /dev/null
@@ -1,526 +0,0 @@
-
-
-import os
-import sys
-from benchmarks import bench_tuner_data
-from swing_selection import loadLayerDesc
-from benchmarks import batch_id
-
-
-op_mapping = {}
-op_mapping["conv"] = "conv"
-op_mapping["depthwise_conv"] = "group_conv"
-op_mapping["dense"] = "mul"
-op_mapping["batchnorm"] = "batchnorm"
-op_mapping["pool"] = "pool_max"
-op_mapping["pool_mean"] = "pool_mean"
-op_mapping["activation"] = "relu"
-op_mapping["tanh"] = "tanh"
-op_mapping["add"] = "add"
-
-
-
-approx_map = {}
-approx_map["1"] = "swing_level 1"
-approx_map["2"] = "swing_level 2"
-approx_map["3"] = "swing_level 3"
-approx_map["4"] = "swing_level 4"
-approx_map["5"] = "swing_level 5"
-approx_map["6"] = "swing_level 6"
-approx_map["7"] = "swing_level 7"
-
-#approx_map["8"] = "perf 1"
-#approx_map["9"] = "perf 2"
-approx_map["11"] = "fp32 1"
-approx_map["12"] = "fp16 1"
-
-
-approx_map["21"] = "perf 21"
-approx_map["22"] = "perf 22"
-approx_map["23"] = "perf 23"
-approx_map["24"] = "perf 24"
-approx_map["25"] = "perf 25"
-approx_map["26"] = "perf 26"
-approx_map["27"] = "perf 27"
-approx_map["28"] = "perf 28"
-approx_map["29"] = "perf 29"
-approx_map["30"] = "perf 30"
-
-approx_map["31"] = "samp 31"
-approx_map["32"] = "samp 32"
-approx_map["33"] = "samp 33"
-approx_map["34"] = "samp 34"
-approx_map["35"] = "samp 35"
-approx_map["36"] = "samp 36"
-
-
-
-class Config:
-  def __init__(self):
-    self.avg_accuracy = 0
-    self.avg_loss = 0
-    self.speedup = 1
-    self.fname = ""
-    self.flags = []
-
-
-
-
-def isLayer(layer_comp):
-  if layer_comp[0] == "dense" or layer_comp[0] == "conv":
-    return True
-  else:
-    return False
-
-
-def getOpMapping(op_name):
-
-  
-  if op_name not in op_mapping:
-    print ("ERROR: OP not found!! = ", op_name, "\n")
-    sys.exit(0)
-
-  return op_mapping[op_name]
-
-
-
-def getApproxMapping(flag, layer_comp):
-
-  flag_str = str(flag)
-  if flag_str not in approx_map:
-    print ("ERROR: OP not found!! = ", flag_str, "\n")
-    sys.exit(0)
-
-  if "dense" in layer_comp and flag > 7:
-    if flag == 12:
-      return "fp16 1"
-    else:
-      return "fp32 1"
-    
-
-  return approx_map[flag_str]
-
-
-
-def skipFile(fname):
-
-  skip_files = {}
-  skip_files["confidence_summary.txt"] = 1
-  skip_files["promise_confs.txt"] = 1
-
-  if "accuracy" in fname: # *_accuracy files should be skipped
-    return True
-
-  if "norms" in fname: # *_accuracy files should be skipped
-    return True
-
-  if ".#" in fname: # *_accuracy files should be skipped
-    return True
-
-  #if "_promise" in fname: # *_accuracy files should be skipped
-  #  return True
-
-  if not fname[-1].isdigit():
-    return True
-  
-  if fname in skip_files:
-    return True
-  else:
-    return False
-    
-
-
-def parseTopLine(x):
-
-  # The first line of a config file carries whitespace-separated
-  # "avg_accuracy=<val>" and "speedup=<val>" tokens
-  toks = x.split()
-
-  speedup = 1.0
-  avg_accuracy = 0.0
-  for tok in toks:
-    if "avg_accuracy" in tok:
-      avg_accuracy = float(tok.split("=")[1])
-    if "speedup" in tok:
-      speedup = float(tok.split("=")[1])
-
-  return avg_accuracy, speedup
-
-
-
-def loadConfigData(result_dir, baseline_accuracy, sub_dir = "high_confidence"):
-
-  config_arr = []
-  
-  #result_dir += "/promise_tuner/high_confidence/"
-  #result_dir += "/algo_tuner/high_confidence/"
-  result_dir += "/algo_tuner/" + sub_dir + "/"
-  file_names = os.listdir(result_dir)
-
-  
-  for fname in file_names:
-    if not skipFile(fname):
-
-      fpath = result_dir + fname  
-      config = Config()
-      f = open(fpath, "r")
-
-      it = 0
-      for x in f:
-        if x.strip == "":
-            continue       
-        if it == 0:
-          avg_accuracy, speedup = parseTopLine(x)
-          config.avg_accuracy = avg_accuracy
-          config.avg_loss = baseline_accuracy - avg_accuracy 
-          config.speedup = speedup
-          config.fname = fname
-          #print ("acc = " + str(avg_accuracy) + "\n")
-        else:
-          flag = int(x.strip())
-          config.flags.append(flag)
-        it += 1
-  
-      config_arr.append(config)
-        
-
-  return config_arr      
-      
-
-
-
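-# Same parsing as loadConfigData above, but takes the results directory
-# directly instead of appending an algo_tuner sub-directory to it.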
-def loadConfigsFromDir(result_dir, baseline_accuracy):
-
-  config_arr = []
-  
-  #result_dir += "/promise_tuner/high_confidence/"
-  #result_dir += "/algo_tuner/high_confidence/"
-  #result_dir += "/algo_tuner/" + sub_dir + "/"
-  file_names = os.listdir(result_dir)
-
-  
-  for fname in file_names:
-    if not skipFile(fname):
-
-      fpath = result_dir + fname  
-      config = Config()
-      f = open(fpath, "r")
-
-      it = 0
-      for x in f:
-        if x.strip == "":
-            continue       
-        if it == 0:
-          avg_accuracy, speedup = parseTopLine(x)
-          config.avg_accuracy = avg_accuracy
-          config.avg_loss = baseline_accuracy - avg_accuracy 
-          config.speedup = speedup
-          config.fname = fname
-          #print ("acc = " + str(avg_accuracy) + "\n")
-        else:
-          flag = int(x.strip())
-          config.flags.append(flag)
-        it += 1
-  
-      config_arr.append(config)
-        
-
-  return config_arr      
-      
-
-
-
-
-
-
-def loadPromiseConfigs(result_dir, baseline_accuracy, sub_dir = "promise_test"):
-
-  config_arr = []  
-  result_dir += "/algo_tuner/" + sub_dir + "/"
-  file_names = os.listdir(result_dir)
-  
-  for fname in file_names:
-    if "_promise" in fname:
-
-      fpath = result_dir + fname  
-      config = Config()
-      f = open(fpath, "r")
-
-      it = 0
-      for x in f:
-        if x.strip == "":
-            continue
-          
-        if it == 0:
-          avg_accuracy, speedup = parseTopLine(x)
-          config.avg_accuracy = avg_accuracy
-          config.avg_loss = baseline_accuracy - avg_accuracy 
-          config.speedup = speedup
-          config.fname = fname
-          #print ("acc = " + str(avg_accuracy) + "\n")
-        else:
-          flag = int(x.strip())
-          config.flags.append(flag)
-        
-        it += 1
-  
-      config_arr.append(config)
-        
-
-  return config_arr      
-      
-
-
-
-
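-# Precision tag for the non-leading ops of a layer. NOTE: this returns "fp32"
-# for flag 12 even though approx_map maps 12 to "fp16 1", so trailing ops of
-# an fp16-flagged layer are emitted as fp32; flag values 21+ (perf/samp) also
-# land in the fp32 branch. Worth double-checking against the runtime's
-# expectations.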
-def getFP(flag):
-
-  if flag < 11:
-    return "fp16"
-  else:
-    return "fp32"
-
-
-
-def getHardwareTarget(flag):
-
-  if flag <= 7:
-    return "promise"
-  else:
-    return "gpu"
-
-
-def handlePromiseConfs(flag, layer_comp):
-
-  approx_tech = getApproxMapping(flag, layer_comp)      
-  config_str = ""
-  if flag <= 7:
-    config_str += approx_tech + " "  
-
-  return config_str
-
-
-def handleGPUApproxs(flag, layer_comp):
-
-  approx_tech = getApproxMapping(flag, layer_comp)
-  config_str = ""
-  if flag > 7:
-    print ("flag = ", flag)
-    config_str += getOpMapping(layer_comp[0]) + " " + approx_tech + " "
-    for op in layer_comp[1:]:
-      print (layer_comp[1:])
-      print (op)
-      
-      op_name = getOpMapping(op)
-      config_str += str(op_name) + " " + getFP(flag) + " 1 "
-
-  return config_str
-
-
-def generateBaselineConfig(layer_comp):
-
-  config_str = ""
-  config_str += "gpu "
-  for op in layer_comp:
-    op_name = getOpMapping(op)
-    config_str += str(op_name) + " fp16 1 "
-
-  return config_str
-
-
-
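-# Emits one line per layer of the form "<index> <target> <op> <approx> ...":
-# each conv/dense layer consumes one tuned flag, every non-layer operation
-# gets a default fp16 GPU mapping, and a final "gpu softmax fp32 1" line is
-# appended.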
-def buildConfigStr(config, layer_desc):
-
-  index = 1
-  it = 0
-  flags = config.flags
-  config_str = ""
-  
-  for layer_comp in layer_desc:
-    config_str += str(index) + " "
-
-    print ("laye_comp = ", layer_comp)
-    
-    if isLayer(layer_comp):
-      flag = flags[it]
-      it += 1
-
-      print ("flag* = ", flag) 
-      # Add Target Target - GPU, PROMISE
-      config_str += getHardwareTarget(flag) + " "
-
-      print ("config_str = ", config_str)
-      
-      config_str += handlePromiseConfs(flag, layer_comp)
-      config_str += handleGPUApproxs(flag, layer_comp)
-      
-    else: # if a non-Layer Operation
-      config_str += generateBaselineConfig(layer_comp)
-      
-    
-    config_str += "\n"    
-    index += 1
-
-
-  config_str += str(index) + " gpu softmax fp32 1\n"  
-    
-  return config_str
-
-
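-# Inflates the measured accuracy loss before it is written out (a fixed +0.4
-# for small losses, a 1.5x factor otherwise), presumably as a safety margin
-# over the tuning-time estimate.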
-def adjustLoss(loss):
-
-  if loss < 0.3:
-    loss += 0.4    
-  else:
-    loss = loss * 1.5
-
-  return loss
-    
-
-
-def dumpConfig(layer_desc, config_arrs, result_dir):
-
-  f = open(result_dir + "/tuner_pareto_confs_" + batch_id + ".txt", "w+")
-  it = 1
-  for config in config_arrs:
-    f.write("+++++\n")
-    f.write("conf" + str(it) + " " + str(config.speedup) + " 0 " + str(config.avg_accuracy) + " " + str(adjustLoss(config.avg_loss)) + "\n")
-
-    config_str = buildConfigStr(config, layer_desc)
-
-    f.write(config_str)
-    
-    f.write("-----\n")
-          
-    it += 1
-    
-
-
-def prependBaseline(Bench):
-
-  f1 = open(Bench.base_dir + "/tuner_confs_base.txt", "r")
-  baseline_str = f1.read()
-  f1.close()
-
-  f2 = open(Bench.base_dir + "/tuner_pareto_confs_" + batch_id + ".txt", "r")
-  config_str = f2.read()
-  f2.close()
-
-  f3 = open(Bench.base_dir + "/tuner_pareto_confs_" + batch_id + ".txt", "w+")
-  f3.write(baseline_str)
-  f3.write(config_str)
-  f3.close()
-
-
-    
-def generateConf(Bench):
-
-  layer_desc = loadLayerDesc(Bench.layer_file)
-
-  print ("layer_desc = ", layer_desc)
-  
-  #config_arr1 = loadConfigData(Bench.result_dir_1, Bench.promise_accuracy)
-  #config_arr2 = loadConfigData(Bench.result_dir_2, Bench.promise_accuracy)
-  #config_arr3 = loadConfigData(Bench.result_dir_3, Bench.promise_accuracy)
-
-  result_dir1 = Bench.result_dir_1 + "/algo_tuner/pareto/"
-  result_dir2 = Bench.result_dir_2 + "/algo_tuner/pareto/"
-  result_dir3 = Bench.result_dir_3 + "/algo_tuner/pareto/"
- 
-  config_arr1 = loadConfigsFromDir(result_dir1, Bench.promise_accuracy)
-  config_arr2 = loadConfigsFromDir(result_dir2, Bench.promise_accuracy)
-  config_arr3 = loadConfigsFromDir(result_dir3, Bench.promise_accuracy)
-
-  config_arrs = config_arr1 + config_arr2 + config_arr3
-  
-  dumpConfig(layer_desc, config_arrs, Bench.base_dir) 
-
-  prependBaseline(Bench)
-   
-  
-
-
-def dumpBaselineConfs(Bench):
-
-  layer_desc = loadLayerDesc(Bench.layer_file)
-
-  print ("layer_desc = ", layer_desc)
-  
-  f = open(Bench.base_dir + "/tuner_confs_base.txt", "w+")
- 
-  f.write("+++++\n")
-  f.write("conf" + str(1) + " " + str(1) + " 0 " + str(Bench.promise_accuracy) + " " + str(0) + "\n")
-
-  config = Config()
-  flags = []
-  for i in range(Bench.num_layers):
-    flags.append(11)
-    
-  config.flags = flags
-  config_str = buildConfigStr(config, layer_desc)
-
-  f.write(config_str)  
-  f.write("-----\n")
-          
-
-  f.close()
-  
-  
-  #f.write("+++++\n")
-  #f.write("conf" + str(2) + " " + str(1.5) + " 0 " + str(Bench.promise_accuracy) + " " + str(0) + "\n")
-
-  #config = Config()
-  #flags = []
-  #for i in range(Bench.num_layers):
-  #  flags.append(12)
-    
-  #config.flags = flags
-  #config_str = buildConfigStr(config, layer_desc)
-
-  #f.write(config_str)    
-  #f.write("-----\n")
-
-
-
-  
-
-
-if __name__ == "__main__":
-
-  Bench = bench_tuner_data["alexnet_cifar10"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["alexnet2_cifar10"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["vgg16_cifar10"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["vgg16_cifar100"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["resnet18_cifar10"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["lenet_keras"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-  
-  Bench = bench_tuner_data["mobilenet_cifar10"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-    
-  Bench = bench_tuner_data["mobilenet_shallow"]
-  generateConf(Bench)
-  dumpBaselineConfs(Bench)
-
-
-
-
-  
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compareResults.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compareResults.py
deleted file mode 100644
index 6ee7466242d47299d5aa7622f15aef7d35832a2a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compareResults.py
+++ /dev/null
@@ -1,66 +0,0 @@
-
-
-
-import os
-from benchmarks import bench_tuner_data
-from buildRtConfig import loadConfigData
-from buildRtConfig import loadConfigsFromDir
-
-
-
-def compareBench(batch_ids, Bench):
-
-  losses = ["1", "2", "3"]
-  for loss in losses:
-    print ("\n Loss = ", loss, " % \n")
-    for id in batch_ids:
-      result_dir = Bench.base_dir + "/loss_" + loss + "/batch" + id 
-      #config_arr = loadConfigData(result_dir, Bench.promise_accuracy, "high_confidence")
-
-      #result_dir += "/algo_tuner/high_confidence/"
-      result_dir += "/promise_tuner3/high_confidence/"
-      
-      config_arr = loadConfigsFromDir(result_dir, Bench.promise_accuracy)
-      
-      count = len(config_arr)
-      if len(config_arr) > 0:
-        max_speedup = max(config.speedup for config in config_arr)
-      else:
-        max_speedup = 1.0  
-      print ("Bench = ", Bench.promise_binary, " BatchID = ", id, " Loss = ", loss, " Count = ", count, " MaxS = ", max_speedup)
-  
-
-
-
-if __name__ == "__main__":
-
-
-  batch_ids = []
-
-  #batch_ids.append("13") # No Error Sens - baseline
-  #batch_ids.append("14") # Ops Skipped 10% for Loss1, 25% Loss2, 40% Loss3
-  #batch_ids.append("15") # 3 differnet levels for each of Loss1, Loss2, Loss3
-  #batch_ids.append("19") # Baseline + Pareto
-  #batch_ids.append("20") # Batch18 + Pareto
-  
-  #batch_ids.append("101") # Algo-specific tuning
-
-  #batch_ids.append("201") # Algo-specific tuning
-
-  #---- batch_ids.append("202") # Algo-specific tuning
-  #batch_ids.append("212") # Algo-specific tuning
-  #batch_ids.append("211") # Algo-specific tuning
-
-
-  batch_ids.append("220") # Algo-specific tuning
- 
-  
-  compareBench(batch_ids, bench_tuner_data["lenet_keras"])
-  compareBench(batch_ids, bench_tuner_data["alexnet_cifar10"])
-  compareBench(batch_ids, bench_tuner_data["mobilenet_cifar10"])
- 
-  compareBench(batch_ids, bench_tuner_data["alexnet2_cifar10"])
-  compareBench(batch_ids, bench_tuner_data["vgg16_cifar10"])
-  compareBench(batch_ids, bench_tuner_data["vgg16_cifar100"])    
-  compareBench(batch_ids, bench_tuner_data["resnet18_cifar10"])
-  compareBench(batch_ids, bench_tuner_data["mobilenet_shallow"])
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compute_confs.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compute_confs.py
deleted file mode 100644
index f82c09095ceac24d8ee4a765f1d63be987b625a9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/compute_confs.py
+++ /dev/null
@@ -1,56 +0,0 @@
-
-
-from swing_selection import compute_swing_selection
-from swing_selection2 import compute_swing_selection2
-  
-
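-# For each of the two loss levels (1% and 2% results), computes the per-layer
-# swing selections and collects the ranked configuration file names.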
-def computeBenchSwings(Bench):
-
-  dir_prefix = "../build_tuner/"
-  
-  loss_confs = []
-  conf_ranks = []
-  # Swing selection for 1% and 2% results
-  #Bench = bench_tuner_data[bench_name]
-  tuned_result_dir = dir_prefix + Bench.result_dir_1 + "/high_confidence/" 
-  layer_file = Bench.layer_file
-  layer_swings, file_names = compute_swing_selection(tuned_result_dir, layer_file)
-  loss_confs.append(layer_swings)
-  conf_ranks.append(file_names)
-  print (file_names)
-  
-  tuned_result_dir = dir_prefix + Bench.result_dir_2 + "/high_confidence/" 
-  layer_swings, file_names = compute_swing_selection(tuned_result_dir, layer_file)
-  loss_confs.append(layer_swings)
-  conf_ranks.append(file_names)
-  print (file_names)
-  
-
-  return loss_confs, conf_ranks
-
-
-
-
-
-def computePSNRBenchSwings(Bench):
-
-  loss_confs = []
-  conf_ranks = []
-  # Swing selection for 1% and 2% results
-  #Bench = bench_tuner_data[bench_name]
-  tuned_result_dir = Bench.result_dir_1 + "/high_confidence/" 
-  layer_file = Bench.layer_file
-  layer_swings, file_names = compute_swing_selection2(tuned_result_dir, layer_file)
-  loss_confs.append(layer_swings)
-  conf_ranks.append(file_names)
-  print (file_names)
-  
-  tuned_result_dir = Bench.result_dir_2 + "/high_confidence/" 
-  layer_swings, file_names = compute_swing_selection2(tuned_result_dir, layer_file)
-  loss_confs.append(layer_swings)
-  conf_ranks.append(file_names)
-  print (file_names)
-  
-
-  return loss_confs, conf_ranks
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/error_sensitivity.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/error_sensitivity.py
deleted file mode 100644
index 186477164240694ebae63f019b7824dc1e12c83b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/error_sensitivity.py
+++ /dev/null
@@ -1,378 +0,0 @@
-
-
-import subprocess
-import os
-import operator
-from benchmarks import bench_tuner_data
-from swing_selection import loadLayerDesc
-import math
-
-
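-# Writes one error knob per line to "opentuner_flags": the tensor under test
-# gets error_level and every other tensor gets default_error, so sensitivity
-# is probed one tensor at a time.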
-def constructTunerFile(num_flags, tensor_id, error_level, default_error):
-
-  f = open("opentuner_flags", "w+")
-
-  for i in range(num_flags):
-    if i == tensor_id:
-      f.write(str(error_level) + "\n")
-    else:
-      f.write(str(default_error) + "\n")
-
-  f.close()
-    
-
-
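-# Runs the benchmark binary for num_runs iterations and averages the accuracy
-# drop relative to gold_acc, reading the per-run accuracies the binary dumps
-# into "run_accuracies.txt".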
-def runAndTestError(binary_name, gold_acc):
-
-  num_runs = 10
-
-  binary_name = "./" + binary_name
-  FNULL = open(os.devnull, 'wb')
-  p = subprocess.Popen([binary_name, str(num_runs)], stdout = FNULL)
-  p.wait()
-
-  f = open("run_accuracies.txt")
-
-  total_err = 0.0
-  for x in f:
-    acc = float(x.strip())    
-    total_err += (gold_acc - acc)
-
-  avg_err = total_err / num_runs
-
-  return avg_err
-    
-
-
-def roundDecimal(val):
-
-  new_val = int(val * 10000)
-  new_val = float(new_val) / 10000
-
-  return new_val
-
-
-
-
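-# Per-tensor sensitivity sweep: inject error into one tensor at a time (all
-# others at 0), average the accuracy drop across the four error levels, then
-# dump both the raw per-tensor averages and a ranking sorted by that average.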
-def test_sensitivity(Bench):
-
-  tensor_errors = []
-  
-  error_levels = [6, 9, 12, 15]
-  num_flags = Bench.num_flags
-
-  for tensor_id in range(num_flags):
-    total_error = 0
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 0)
-      error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-      #print (tensor_id, error_level, error)
-      total_error += error
-
-    avg_error = total_error / len(error_levels)
-
-    tensor_errors.append([tensor_id, avg_error])
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/tensor_errors_multiple.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i) +  "\t" + str(tensor_errors[i][1]) + "\n")
-
-  f.close()
-
-  f_name = Bench.base_dir + "/tensor_errors_ranked_1000.txt"  
-  f2 = open(f_name, "w+")
-  tensor_errors.sort(key=operator.itemgetter(1))
-
-
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-
-    f2.write(str(tensor_errors[i][0]) +  "\t" + str(tensor_errors[i][1]) + "\n")
-    
-
-  f2.close()
-
-
-
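-# Like test_sensitivity, but drops are measured against a reference run with
-# every tensor at error level 6, capturing composite effects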
-def test_sensitivity2(Bench):
-
-  num_flags = Bench.num_flags
-
-  constructTunerFile(num_flags, 0, 6, 6)
-  error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-
-  ref_acc = Bench.tuner_accuracy - error
-  print ("*** Gold accuracy = ", Bench.tuner_accuracy, "  Ref accuracy = ", ref_acc, " *** \n\n")
-  
-  
-  tensor_errors = []
-  
-  error_levels = [6, 9, 12, 15]
-
-  for tensor_id in range(num_flags):
-    total_error = 0
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 6)
-      error = runAndTestError(Bench.tuner_binary, ref_acc)
-      print (tensor_id, error_level, error)
-      total_error += error
-
-    avg_error = total_error / len(error_levels)
-
-    tensor_errors.append([tensor_id, avg_error])
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/tensor_composite_errors.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i) +  "\t" + str(tensor_errors[i][1]) + "\n")
-
-  f.close()
-
-  f_name = Bench.base_dir + "/tensor_composite_errors_ranked.txt"  
-  f2 = open(f_name, "w+")
-  tensor_errors.sort(key=operator.itemgetter(1))
-
-
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-
-    f2.write(str(tensor_errors[i][0]) +  "\t" + str(tensor_errors[i][1]) + "\n")
-    
-
-  f2.close()
-
-
-
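-# Variant that records the full per-level error vector for each tensor
-# instead of a single average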
-def test_sensitivity3(Bench):
-
-  tensor_errors = []
-  
-  error_levels = [2, 5, 8, 11, 14, 17]
-  num_flags = Bench.num_flags
-
-  for tensor_id in range(num_flags):
-    total_error = 0
-    errors = []
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 0)
-      error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-      print (tensor_id, error_level, error)
-      errors.append(error)
-      
-    tensor_errors.append([tensor_id, errors])
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/tensor_errors_multiple.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i))
-    for j in range(len(tensor_errors[i][1])):
-      val = roundDecimal(tensor_errors[i][1][j])
-      f.write("\t" + str(val) )
-    f.write("\n")
-      
-  f.close()
-
-
-
-
-
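-# Full per-level error vectors, measured against a reference run with every
-# tensor at error level 5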
-def test_sensitivity4(Bench):
-
-  num_flags = Bench.num_flags
-
-  constructTunerFile(num_flags, 0, 5, 5)
-  error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
-
-  ref_acc = Bench.tuner_accuracy - error
-  print ("*** Gold accuracy = ", Bench.tuner_accuracy, "  Ref accuracy = ", ref_acc, " *** \n\n")
-  
-  
-  tensor_errors = []  
-  error_levels = [4, 8, 11, 14, 16, 19]
-
-  for tensor_id in range(num_flags):
-    errors = []
-    for error_level in error_levels:
-      constructTunerFile(num_flags, tensor_id, error_level, 5)
-      error = runAndTestError(Bench.tuner_binary, ref_acc)
-      print (tensor_id, error_level, error)
-      errors.append(error)
-
-    tensor_errors.append([tensor_id, errors])
-
-
-
-  print ("\n\n*** Per-Tensor Avg Errors \n\n")
-
-  f_name = Bench.base_dir + "/composite_errors.txt"  
-  f = open(f_name, "w+")
-  for i in range(len(tensor_errors)):
-    print (i, tensor_errors[i][1])
-    f.write(str(i))
-    for j in range(len(tensor_errors[i][1])):
-      val = roundDecimal(tensor_errors[i][1][j])
-      f.write("\t" + str(val) )
-    f.write("\n")
-      
-  f.close()
-
-  
-
-  
-
-def readTensorErrs(result_dir):
-
-  tensor_errs = []
-  f = open(result_dir + "/tensor_errors.txt")
-  
-  for x in f:
-    err = float(x.split()[1])
-    tensor_errs.append(err)
-    
-  return tensor_errs
-
-
-
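-# Averages the error columns of tensor_errors_multiple.txt; note toks[2:-1]
-# skips the tensor index plus the first and last error columns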
-def readTensorErrs2(result_dir):
-
-  tensor_errs = []
-  f = open(result_dir + "/tensor_errors_multiple.txt")
-  
-  for x in f:
-    toks = x.split()
-    total_err = 0.0
-    for tok in toks[2:-1]:
-      err = float(tok)
-      total_err += err
-
-    avg_err = total_err / len(toks[2:-1])  
-    tensor_errs.append(avg_err)
-    
-  return tensor_errs
-
-
-def isSkipLayer(layer):
-
-  # layers that are neither dense nor conv are excluded from skip selection
-  return not ("dense" in layer or "conv" in layer)
-  
-
-def readLayerCosts(cost_file):
-  
-  f = open(cost_file)
-  layer_costs = []
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-
-  return layer_costs
-  
-
-
-disable_skipping = False
-
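-# Ranks conv/dense layers by error impact (avg error scaled by the sqrt of the
-# layer's cost) and returns the top percent_to_skip% of layer indices joined
-# with '_'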
-def select_skip_layers(Bench, percent_to_skip):
-
-  if disable_skipping:
-    return "0"
-  
-  result_dir = Bench.base_dir
-  layer_file = Bench.layer_file
-
-  tensor_errs = readTensorErrs2(result_dir)
-  layer_costs = readLayerCosts(Bench.cost_file)
-  layer_desc = loadLayerDesc(layer_file)
-
-  it = 0
-  index = 0
-  layer_errs = []
-  for layer in layer_desc:
-    layer_len = len(layer)
-    avg_err = tensor_errs[index]
-    index += layer_len
- 
-    if isSkipLayer(layer):
-      continue
-
-    cost = math.sqrt(layer_costs[it]) / 100
-    err_impact = avg_err / cost
-    #print ("layer ", it, " avg_err = ", avg_err, " cost = ", cost, " err_impact = ", err_impact)
-
-    layer_errs.append((err_impact, it))
-    it += 1
-
-  layer_errs.sort(key=operator.itemgetter(0), reverse=True)
-
-  to_skip = len(layer_errs)
-  to_skip = math.ceil((percent_to_skip / 100.0) * to_skip)
-
-  skip_str = ""
-  it = 0
-  for err in layer_errs:
-    if it >= to_skip:
-      break
-
-    skip_str += str(err[1])
-    if it < to_skip - 1:
-      skip_str += "_"
-      
-    it += 1
-    
-  return skip_str
-
-
-
-
-
-
-if __name__ == "__main__":
-
-
-  AlexNet = bench_tuner_data["alexnet_cifar10"]
-  skip_str = select_skip_layers(AlexNet, 10)
-  print ("AlexNet skip_str = ", skip_str)
-
-
-  AlexNet2 = bench_tuner_data["alexnet2_cifar10"]
-  skip_str = select_skip_layers(AlexNet2, 15)
-  print ("AlexNet2 skip_str = ", skip_str)
-
-
-  VGG16 = bench_tuner_data["vgg16_cifar10"]
-  skip_str = select_skip_layers(VGG16, 15)
-  print ("VGG16 skip_str = ", skip_str)
-
-
-  VGG16_100 = bench_tuner_data["vgg16_cifar100"]
-  skip_str = select_skip_layers(VGG16_100, 15)
-  print ("VGG16_100 skip_str = ", skip_str)
-
-  
-  ResNet = bench_tuner_data["resnet18_cifar10"]
-  skip_str = select_skip_layers(ResNet, 10)
-  print ("ResNet skip_str = ", skip_str)
-
-
-  MobileNet = bench_tuner_data["mobilenet_cifar10"]
-  skip_str = select_skip_layers(MobileNet, 15)
-  print ("MobileNet skip_str = ", skip_str)
-
-
-  MobileNet_SH = bench_tuner_data["mobilenet_shallow"]
-  skip_str = select_skip_layers(MobileNet_SH, 15)
-  print ("MobileNet_SH skip_str = ", skip_str)
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/pareto_curve.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/pareto_curve.py
deleted file mode 100644
index d90403be23fae547fde9e2ac4996f5cca3b0e5fb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/pareto_curve.py
+++ /dev/null
@@ -1,313 +0,0 @@
-
-
-from buildRtConfig import loadConfigData
-from benchmarks import bench_tuner_data
-import os
-import shutil
-
-
-AL_THRESHOLD = 0.1
-SPEEDUP_BAND_SIZE = 0.1
-ENERGY_BAND_SIZE = 10
-
-class Configuration:
-    def __init__(self, name, speedup, energy, accuracy, accuracy_loss, flags):
-        self.name = name
-        self.speedup = speedup
-        self.energy = energy
-        self.accuracy = accuracy
-        self.accuracy_loss = accuracy_loss
-        self.flags  = flags
-    def __repr__(self):
-        return repr((self.name, self.speedup, self.energy, self.accuracy, self.accuracy_loss))
-
-configuration_objects = [
-    Configuration('conf1', 1.05, 15, 85, 1.2, []),
-    Configuration('conf2', 2.51, 12, 83, 1.4, []),
-    Configuration('conf3', 2.05, 10, 84, 0.8, []),
-]
-
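-# Buckets configs into accuracy-loss bands of width AL_THRESHOLD; each band's
-# best-speedup and best-energy points are kept if they improve on the previous
-# band's picks. Returns [speedup_curve, energy_curve]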
-def compute_pareto_points(configurations):
-    speedupconfigurations = []
-    energyconfigurations = []
-    #sort configurations based on speedup
-    sorted_configurations = sorted(configurations, key=lambda conf: conf.accuracy_loss)
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # extend the band while accuracy loss stays within AL_THRESHOLD
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD):
-            end_idx += 1
-        # find the best speedup and energy in this accuracy-loss level
-        sp = -1.0
-        sp_idx = 0
-        en = -1.0
-        en_idx = 0
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup > sp:
-                sp = sorted_configurations[i].speedup
-                sp_idx = i
-            if sorted_configurations[i].energy > en:
-                en = sorted_configurations[i].energy
-                en_idx = i
-        sp_not_dominated = True
-        # if not empty list of configurations
-        if speedupconfigurations:
-            if speedupconfigurations[-1].speedup >= sp:
-                sp_not_dominated = False
-        en_not_dominated = True
-        # if not empty list of configurations
-        if energyconfigurations:
-            if energyconfigurations[-1].energy >= en:
-                en_not_dominated = False
-        if sp_not_dominated:
-            speedupconfigurations.append(sorted_configurations[sp_idx])
-        if en_not_dominated:
-            energyconfigurations.append(sorted_configurations[en_idx])
-        # outer while loop variable increment
-        start_idx = end_idx
-    return [speedupconfigurations, energyconfigurations]
-
-
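-# Same banding as compute_pareto_points, followed by a second pass that also
-# admits configs within speedup_band_width / energy_band_width of each curve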
-def compute_pareto_points_with_margin(configurations, speedup_band_width, energy_band_width):
-    speedupconfigurations = []
-    energyconfigurations = []
-    #sort configurations based on speedup
-    sorted_configurations = sorted(configurations, key=lambda conf: conf.accuracy_loss)
-
-    idx_to_sp_conf_dict = {}
-    idx_to_en_conf_dict = {}
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # extend the band while accuracy loss stays within AL_THRESHOLD
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD):
-            end_idx += 1
-        # find the best speedup and energy in this accuracy-loss level
-        sp = -1.0
-        sp_idx = 0
-        en = -1.0
-        en_idx = 0
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup > sp:
-                sp = sorted_configurations[i].speedup
-                sp_idx = i
-            if sorted_configurations[i].energy > en:
-                en = sorted_configurations[i].energy
-                en_idx = i
-        sp_not_dominated = True
-        # if not empty list of configurations
-        if speedupconfigurations:
-            if speedupconfigurations[-1].speedup >= sp:
-                sp_not_dominated = False
-        en_not_dominated = True
-        # if not empty list of configurations
-        if energyconfigurations:
-            if energyconfigurations[-1].energy >= en:
-                en_not_dominated = False
-        if sp_not_dominated:
-            speedupconfigurations.append(sorted_configurations[sp_idx])
-        idx_to_sp_conf_dict[start_idx] = len(speedupconfigurations)-1
-        if en_not_dominated:
-            energyconfigurations.append(sorted_configurations[en_idx])
-        idx_to_en_conf_dict[start_idx] = len(energyconfigurations)-1
-        # outer while loop variable increment
-        start_idx = end_idx
-
-    # We want to add configurations in a band of a certain width around the
-    # curves; this is not possible during construction, because the quality of
-    # the curve would deteriorate quickly
-
-    AdjustedSpeedupCurve = []
-    AdjustedEnergyCurve = []
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # extend the band while accuracy loss stays within AL_THRESHOLD
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD):
-            end_idx += 1
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup + speedup_band_width >= speedupconfigurations[idx_to_sp_conf_dict[start_idx]].speedup:
-                AdjustedSpeedupCurve.append(sorted_configurations[i])
-            if sorted_configurations[i].energy + energy_band_width >= energyconfigurations[idx_to_en_conf_dict[start_idx]].energy:
-                AdjustedEnergyCurve.append(sorted_configurations[i])
-        # outer while loop variable increment
-        start_idx = end_idx
-
-    return [AdjustedSpeedupCurve, AdjustedEnergyCurve]
-
-
-
-def findParetoConfigs(base_dir, accuracy):
-
-  result_dir = base_dir + "/algo_tuner/pareto/"
-  try:
-      os.mkdir(result_dir)
-  except OSError:
-      print ("could not create dir: " + result_dir)
-
-  input_dir = base_dir    
-  config_arr = loadConfigData(input_dir, accuracy, "high_confidence")
-
-  config_list = []
-  for config in config_arr:
-    conf = Configuration(config.fname, config.speedup, 100, config.avg_accuracy, config.avg_loss, config.flags)
-    config_list.append(conf)
-
-
-  if (len(config_list) > 0):   
-    max_speedup = max(config.speedup for config in config_list)
-  else:
-    max_speedup = 1.5
-  
-  # NOTE: these locals shadow the module-level band-size defaults
-  SPEEDUP_BAND_SIZE = 0.05  # fixed width; earlier versions used a fraction of max_speedup
-  ENERGY_BAND_SIZE = 10
-
-  print ("max_speedup = ", max_speedup, " SPEEDUP_BAND_SIZE = ", SPEEDUP_BAND_SIZE)
-  
-  ASC, AEC = compute_pareto_points_with_margin(config_list, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
-
-
-  #print (ASC)
-  #print (config_list)
-
-  # too few points: fall back to all configs; too many: tighten the band
-  if len(ASC) < 5:
-    ASC = config_list
-
-  if len(ASC) > 50:
-    ASC, AEC = compute_pareto_points_with_margin(config_list, SPEEDUP_BAND_SIZE/4, ENERGY_BAND_SIZE)
- 
-  
-  print ("len(config_list) = ", len(config_list))
-  print ("len(ASC) = ", len(ASC))
-  
-  for conf in ASC:
-    src_path = base_dir + "/algo_tuner/high_confidence/" + conf.name
-    dst_path = base_dir + "/algo_tuner/pareto/" + conf.name
-    shutil.copy(src_path, dst_path)
-
-  return ASC
-
-
-
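-# Collects, per layer, the set of knob values that appear in any config on
-# the Pareto curve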
-def flagsPerLayer(ASC, num_layers):
-
-  layer_flags = []
-  for i in range(num_layers):
-    layer_map = {}
-    layer_flags.append(layer_map)
-    
-
-  for config in ASC:
-    config_flags = config.flags
-    for i in range(len(config_flags)):
-      layer_flags[i][config_flags[i]] = 1
-
-      
-  print (layer_flags)
-    
-  return layer_flags
-
-  
-    
-    
-
-
-def dumpBenchPareto(Bench):
-
-  result_dir1 = Bench.result_dir_1
-  result_dir2 = Bench.result_dir_2
-  result_dir3 = Bench.result_dir_3
-
-  acc_thresh = Bench.promise_accuracy
-  
-  ASC1 = findParetoConfigs(result_dir1, acc_thresh)
-  ASC2 = findParetoConfigs(result_dir2, acc_thresh)
-  ASC3 = findParetoConfigs(result_dir3, acc_thresh)
-
-
-  flags1 = flagsPerLayer(ASC1, Bench.num_layers)
-  flags2 = flagsPerLayer(ASC2, Bench.num_layers)
-  flags3 = flagsPerLayer(ASC3, Bench.num_layers)
-
-  return flags1, flags2, flags3
-
-
-
-
-if __name__ == "__main__":
-
-  Bench = bench_tuner_data["alexnet_cifar10"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["alexnet2_cifar10"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar10"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar100"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["resnet18_cifar10"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["lenet_keras"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["mobilenet_cifar10"]  
-  dumpBenchPareto(Bench)
-
-  Bench = bench_tuner_data["mobilenet_shallow"]  
-  dumpBenchPareto(Bench)
-
-  
-  #get_pareto_configs("")
-  
-  #SC, EC = compute_pareto_points(configuration_objects)
-  #ASC, AEC = compute_pareto_points_with_margin(configuration_objects, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
-
-  #print(SC)
-  #print(EC)
-
-  #print(ASC)
-  #print(AEC)
-
-
-
-
-
-
-    #result_dir = base_dir + "/pareto/"
-  #try:
-  #    os.mkdir(result_dir)
-  #except:
-  #    print "could not create dir"
-
-  #input_dir = base_dir + "/full_results/"    
-  #result_dir = "../build_tuner/tuner_results/alexnet_cifar10/loss_3/batch15"
-  #config_arr = loadConfigData(input_dir, accuracy)
-
-  #config_list = []
-
-  #it = 0
-  #for config in config_arr:
-  #  config = Configuration(config.fname , config.speedup, 100, config.avg_accuracy, config.avg_loss)
-  #  config_list.append(config)
-
-    
-  #ASC, AEC = compute_pareto_points_with_margin(config_list, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
-
-  #for conf in ASC:
-  #  dst_path = conf.name.replace("full_results", "pareto")
-  #  shutil.copy(conf.name, dst_path)
-    
-  
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/profiling.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/profiling.py
deleted file mode 100644
index 3ed37822a6fa654c16f5c8ce3b41dc8287931b87..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/profiling.py
+++ /dev/null
@@ -1,26 +0,0 @@
-
-import time
-
-profiled_ops = {}
-
-def startProfile(op_id):
-  start = time.time()
-  return start
-  
-
-def stopProfile(op_id, start):
-  end = time.time()
-  total_time = end - start
- 
-  profiled_ops[op_id] = total_time
- 
-  
-def dumpProfiles(file_name):
-
-  f = open(file_name, "w+")
-  for op_id in profiled_ops:
-    f.write(op_id + " : " + str(profiled_ops[op_id]) + "\n")
-
-  f.close()
-      
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/remap.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/remap.py
deleted file mode 100644
index 8dc69357526b711d563d454f0ce41219dbfe579c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/remap.py
+++ /dev/null
@@ -1,291 +0,0 @@
-
-import sys
-import os
-import shutil
-# NOTE: getBaselineConfig, readAccuracy and validateAlgoConfigs (used by
-# validateRemapConfigs below) are assumed to also live in the validation module
-from validation import invokeBinary, getBaselineConfig, readAccuracy, validateAlgoConfigs
-from buildRtConfig import loadConfigData, loadPromiseConfigs
-from benchmarks import bench_tuner_data, batch_id
-from swing_selection import convL1bins, convL2bins
-
-
-
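-# Parses global_knobs.txt: tab-separated lines whose first token carries the
-# knob ID after a comma and whose third token is the knob's speedup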
-def readKnobConfig(file_path):
-
-  knobs_speedup = {}
-  f = open(file_path, "r")
-  for x in f:
-    toks = x.split("\t")
-    ID = int(toks[0].split(",")[1])
-
-    speedup = float(toks[2])
-    knobs_speedup[ID] = speedup
-  
-  print ("knobs_speedup = ", knobs_speedup)
-  
-  return knobs_speedup
-
-
-
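-# Maps L1/L2 error norms to a PROMISE swing using the calibrated conv bins;
-# very small norms (< 0.1) leave the existing flag unchanged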
-def getPromiseSwing(l1, l2, flag):
-
-  if l1 < 0.1 or l2 < 0.1:
-    return flag
-      
-  swing = 1
-  for i in range(len(convL1bins)):
-    l1_t = convL1bins[i][0]
-    l2_t = convL2bins[i][0]
-    
-    if l1 > l1_t and l2 > l2_t:
-      break
-    swing += 1
-
-  return swing
-
-
-    
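-# Rewrites per-layer flags (except the first layer's) with PROMISE swings
-# derived from the L1/L2 norms in norms_file; stops at the first tensorMul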
-def replaceWithPromise(layer_flags, norms_file):
-
-  num_layers = len(layer_flags)
-
-  f = open(norms_file, "r")
-  it = 0
-  for x in f:    
-    op_name = x.split()[0]
-    print ("op_name = ", op_name)
-    if op_name == "tensorMul":
-      break
-    
-    l1 = float(x.split()[5])
-    l2 = float(x.split()[6])
-
-    if it > 0:
-      flag = getPromiseSwing(l1, l2, layer_flags[it])
-      layer_flags[it] = flag
-    
-    #print ("l1 = ", l1, " l2 = ", l2)
-    it += 1                   
-
-    if it == num_layers:
-      break
-
-  print (layer_flags)
-  return layer_flags
-
-
-
-
-def readCostFile(file_path):
-
-  layer_costs = []
-  f = open(file_path)
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-
-  print ("len(layer_costs) = ", layer_costs)
-  f.close()
-
-  return layer_costs
-
-
-
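-# Predicted speedup = total original cost / total cost with each op's cost
-# divided by its knob's speedup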
-def getSpeedup(flags, knobs_speedup, layer_costs):
-
-  orig_cost = 0.0
-  total_cost = 0.0
-  it = 0
-  for flag_value in flags:
-    op_cost = layer_costs[it]
-    speedup = knobs_speedup[flag_value]
-
-    total_cost += op_cost / speedup
-    orig_cost += op_cost    
-    it += 1
-
-  speedup = orig_cost / total_cost
-  
-  return speedup
-
-
-
-def dumpNewFlags(new_flags, orig_file, promise_flags_file, layer_costs, knobs_speedup):
-
-  speedup = getSpeedup(new_flags, knobs_speedup, layer_costs)
-  
-  top_line = ""
-  for x in open(orig_file, "r"):
-    top_line = x
-    break
-  
-  f = open(promise_flags_file, "w+")
-  f.write(top_line.replace("\n", ""))
-  f.write("\tnew_speedup=" + str(speedup) + "\n")
-  
-
-  for flag in new_flags:
-    f.write(str(flag) + "\n")
-    
-  f.close()
-
-
-  
-
-def remapLossConfig(configs_arr, result_dir, sub_dir, layer_costs, knobs_speedup):
-
-  
-  for conf in configs_arr:
-    layer_flags = conf.flags
-    fname = conf.fname
-    norms_file = result_dir + "/algo_tuner/" + sub_dir + "/" + fname + "_norms"
-    orig_file = result_dir + "/algo_tuner/" + sub_dir + "/" + fname
-    new_flags = replaceWithPromise(layer_flags, norms_file)
-
-    promise_test_dir = result_dir + "/algo_tuner/promise_test/"
-    if not os.path.exists(promise_test_dir):
-      os.mkdir(promise_test_dir)
-
-    promise_flags_file = result_dir + "/algo_tuner/promise_test/" + fname + "_promise"
-    dumpNewFlags(new_flags, orig_file, promise_flags_file, layer_costs, knobs_speedup)
-
-  
-
-def remapConfigs(Bench):
-
-  
-  loss1_dir = Bench.result_dir_1
-  loss2_dir = Bench.result_dir_2
-  loss3_dir = Bench.result_dir_3
-
-  loss1_configs = loadConfigData(loss1_dir, 100, "validated")
-  loss2_configs = loadConfigData(loss2_dir, 100, "validated")
-  loss3_configs = loadConfigData(loss3_dir, 100, "validated")
-
-  knobs_speedup = readKnobConfig("../opentuner/data/global_knobs.txt")
-  layer_costs = readCostFile(Bench.cost_file)
-
-  remapLossConfig(loss1_configs, loss1_dir, "validated", layer_costs, knobs_speedup)
-  remapLossConfig(loss2_configs, loss2_dir, "validated", layer_costs, knobs_speedup)
-  remapLossConfig(loss3_configs, loss3_dir, "validated", layer_costs, knobs_speedup)
-  
-  
-
-
-def validateRemapConfigs(Bench):
-
-  num_layers = Bench.num_layers
-  base_conf = getBaselineConfig(num_layers)
-  # Path to binary to run
-  binary_path = Bench.promise_binary
-  # NOTE: 'target_acc' passed 0.0 since unused for baseline run
-  invokeBinary(binary_path, base_conf, 1, 2000, 8000, 0.0)
-  gold_acc = readAccuracy("final_accuracy")
-
-  
-  loss1_dir = Bench.result_dir_1
-  loss2_dir = Bench.result_dir_2
-  loss3_dir = Bench.result_dir_3
-
-  loss1_configs = loadPromiseConfigs(loss1_dir, 100, "promise_test")
-  loss2_configs = loadPromiseConfigs(loss2_dir, 100, "promise_test")
-  loss3_configs = loadPromiseConfigs(loss3_dir, 100, "promise_test")
-
-  runs = 30
-  validateAlgoConfigs(binary_path, loss1_dir, loss1_configs, gold_acc, 1.0, runs)
-  validateAlgoConfigs(binary_path, loss2_dir, loss2_configs, gold_acc, 2.0, runs)
-  validateAlgoConfigs(binary_path, loss3_dir, loss3_configs, gold_acc, 3.0, runs)
-
-
-
-
-
-  
-    
-
-def copyNormFile(fname, result_dir, sub_dir):
-
-  target_dir = result_dir + "/algo_tuner/" + sub_dir
-  dest_file = target_dir + "/" + fname + "_norms"
-
-  shutil.copy("accuracy_summary", dest_file)  
-
-
-
-
-def dumpNorms(binary_path, result_dir, configs_arr):
-
-  runs = 1  
-  for conf in configs_arr:
-    layer_swings = conf.flags
-    invokeBinary(binary_path, layer_swings, runs, 3000, 5000, 100)
-    
-    #copyNormFile(conf.fname, result_dir, "high_confidence")
-    copyNormFile(conf.fname, result_dir, "validated")
-
-
-
-def computeConfigNorms(Bench):
-    
-  loss1_dir = Bench.result_dir_1
-  loss2_dir = Bench.result_dir_2
-  loss3_dir = Bench.result_dir_3
-
-  loss1_configs = loadConfigData(loss1_dir, 100, "validated")
-  loss2_configs = loadConfigData(loss2_dir, 100, "validated")
-  loss3_configs = loadConfigData(loss3_dir, 100, "validated")
-
-
-  binary_path = Bench.promise_binary
-
-  dumpNorms(binary_path, loss1_dir, loss1_configs)
-  dumpNorms(binary_path, loss2_dir, loss2_configs)
-  dumpNorms(binary_path, loss3_dir, loss3_configs)
-  
-
-
-if __name__ == "__main__":
-
-  Bench = bench_tuner_data["alexnet_cifar10"]      
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-  
-  Bench = bench_tuner_data["alexnet2_cifar10"]      
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar10"]      
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar100"]      
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["resnet18_cifar10"]      
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["mobilenet_shallow"]  
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["mobilenet_cifar10"]  
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  Bench = bench_tuner_data["lenet_keras"]  
-  computeConfigNorms(Bench)
-  remapConfigs(Bench)
-  #validateRemapConfigs(Bench)
-
-  #computeConfigNorms(Bench)
-  #remapConfigs(Bench)
-  
-  #validateRemapConfigs(Bench)
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner.py
deleted file mode 100644
index 2df75fbfc4e7568361747f75f06a4b818a8f99be..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner.py
+++ /dev/null
@@ -1,102 +0,0 @@
-
-
-import os
-import subprocess
-from error_sensitivity import select_skip_layers
-
-
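-# Builds and runs an algo_tuner command for one result dir / accuracy budget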
-def runAlgoTunerCmd(Bench, dir_prefix, result_dir, acc_threshold, autotuner_runs):
-
-  tuner_cmd = "python2  ../opentuner/autotuner/algo_tuner.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-layers "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += dir_prefix
-  tuner_cmd += result_dir + "/algo_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy - acc_threshold)
-  tuner_cmd += " --cost-file "
-  tuner_cmd += Bench.cost_file
-  tuner_cmd += " --knobs-config "
-  tuner_cmd += "../opentuner/data/global_knobs.txt"
-  tuner_cmd += " --layer-knobs "
-  tuner_cmd += Bench.layer_knobs
-
-  
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-  
-
-"""
-
-def promiseTunerLoss1(Bench, dir_prefix):
-
-  tuner_runs = int(Bench.autotuner_runs / 3)
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 30)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 50)
-
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers3)
-
-
-def promiseTunerLoss2(Bench, dir_prefix):
-
-  tuner_runs = int(Bench.autotuner_runs / 3) 
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 20)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 40)
-
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers3)
-
-
-  
-def promiseTunerLoss3(Bench, dir_prefix):
-
-  tuner_runs = int (Bench.autotuner_runs / 3)
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 10)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 30)
-  
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers3)
-
-
-"""
-
-
-BASELINE = True
-
-  
-def runAlgoBench(Bench):
-
-  # NOTE: result paths are prefixed with the build_tuner directory, which
-  # holds the promise binaries
-  dir_prefix = "../build_tuner/"
-  
-
-  if BASELINE:
-    tuner_runs = Bench.autotuner_runs 
-    runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs)
-    runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs)
-    runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_3, 2.5, tuner_runs)
-    
-  else:    
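-    # NOTE: promiseTunerLoss1/2/3 are defined only inside the docstring above;
-    # restore them before setting BASELINE = False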
-    promiseTunerLoss1(Bench, dir_prefix)
-    promiseTunerLoss2(Bench, dir_prefix)
-    promiseTunerLoss3(Bench, dir_prefix)
-
-  
-  
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner2.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner2.py
deleted file mode 100644
index 99867fade3aac75d2fcc4c411e25c2d16595052d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_algo_tuner2.py
+++ /dev/null
@@ -1,186 +0,0 @@
-
-
-import os
-import numpy as np
-import subprocess
-from error_sensitivity import select_skip_layers
-from pareto_curve import dumpBenchPareto
-from remap import readCostFile
-
-
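-# algo_tuner2 variant: fixed 100-run budget, with per-layer knobs derived from
-# the Pareto configs and read back from local_knobs.txt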
-def runAlgoTunerCmd(Bench, dir_prefix, result_dir, acc_threshold, autotuner_runs):
-
-  fixed_runs = 100
-  
-  tuner_cmd = "python2  ../opentuner/autotuner/algo_tuner2.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(fixed_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-layers "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += dir_prefix
-  tuner_cmd += result_dir + "/promise_tuner3/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy - acc_threshold)
-  tuner_cmd += " --cost-file "
-  tuner_cmd += Bench.cost_file
-  tuner_cmd += " --layer-file "
-  tuner_cmd += Bench.layer_file
-  tuner_cmd += " --knobs-config "
-  tuner_cmd += "../opentuner/data/global_knobs.txt"
-  tuner_cmd += " --layer-knobs "
-  tuner_cmd += " local_knobs.txt "
-
-  
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-  
-
-  
-def is50Knob(flag):
-
-  flags50 = {21, 22, 26, 27, 31, 32}
-  return flag in flags50
-  
-  
-
-def is25Knob(flag):
-
-  flags25 = {23, 24, 25, 28, 29, 30, 33, 34, 35, 36}
-  return flag in flags25
-  
-   
-  
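-# If a layer's knob set contains a 50% knob, also add knobs 7, 5 and 3; a 25%
-# knob adds knob 7 only (assumed to correspond to PROMISE swing levels)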
-def addPromiseFlags(flag_map):
-
-  flags = []
-
-  has_50_flag = False
-  has_25_flag = False
- 
-  for flag in flag_map:
-    if is50Knob(flag):
-      has_50_flag = True   
-    if is25Knob(flag):
-      has_25_flag = True
-      
-
-  if has_50_flag:
-    flag_map[7] = 1
-    flag_map[5] = 1
-    flag_map[3] = 1
-
-  if has_25_flag:
-    flag_map[7] = 1
-
-  return flag_map
-
-
-
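-# Cost-based heuristic: layers well above the median cost get the aggressive
-# knobs (7, then 7/5/3); very cheap layers (< median/10) are restricted to
-# knob 12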
-def addCostBasedFlags(flag_map, layer_costs, i):
-
-  median = np.median(layer_costs)
-  max_cost = np.max(layer_costs)
-  sorted_vals = np.sort(layer_costs)
-  
-  print ("**** Median = ", median)
-  print ("**** Max_cost = ", max_cost)
-  print ("**** Sorted_vals = ", sorted_vals, "\n\n")
-
-  
-  if (layer_costs[i] > (median * 1.5)):
-    flag_map[7] = 1
-
-  if (layer_costs[i] > (median * 3)) or layer_costs[i] == max_cost:
-    flag_map[7] = 1
-    flag_map[5] = 1
-    flag_map[3] = 1  
-
-    
-  if (layer_costs[i] < (median / 10)):
-    flag_map = {}
-    flag_map[12] = 1
-
-  return flag_map  
-
-
-  
-
-  
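-# Writes local_knobs.txt: one comma-separated knob list per layer, augmented
-# with the PROMISE and cost-based knobs for every layer after the first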
-def constructKnobsFile(flags, layer_costs):
-
-  f = open("local_knobs.txt", "w+")
-  for i in range(len(flags)):
-    flag_map = flags[i]
-
-    if i > 0:
-      flag_map = addPromiseFlags(flag_map)
-      flag_map = addCostBasedFlags(flag_map, layer_costs, i)
-          
-    it = 0
-    for flag in flag_map:
-      f.write(str(flag))
-      if it < len(flag_map) - 1:
-        f.write(",")
-      it += 1  
-      
-    f.write("\n")
-
-  f.close()  
-
-
-
-
-  
-def runPromiseAlgoBench(Bench):
-
-  # NOTE: result paths are prefixed with the build_tuner directory, which
-  # holds the promise binaries
-  dir_prefix = "../build_tuner/"
-  
-
-  tuner_runs = Bench.autotuner_runs
-
-  layer_costs = readCostFile(Bench.cost_file)
-
-  flags1, flags2, flags3 = dumpBenchPareto(Bench)
-  
-  constructKnobsFile(flags1, layer_costs)
-  runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.8, tuner_runs)
-
-  constructKnobsFile(flags2, layer_costs)
-  runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.6, tuner_runs)
-
-  constructKnobsFile(flags3, layer_costs)
-  runAlgoTunerCmd(Bench, dir_prefix, Bench.result_dir_3, 2.2, tuner_runs)
-    
- 
-  
-  
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_autotuner.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_autotuner.py
deleted file mode 100644
index 1e533b8702139966166f860b72a3df1ccae03ee6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_autotuner.py
+++ /dev/null
@@ -1,346 +0,0 @@
-
-import os
-import sys
-import subprocess
-import shutil
-
-from swing_selection import loadLayerDesc
-from error_sensitivity import test_sensitivity, test_sensitivity2, test_sensitivity3, test_sensitivity4  
-from benchmarks import bench_tuner_data
-from run_psnr import runPSNRTuner
-from run_ha_tuner import runTunerBench
-from run_hs_tuner import runPromiseBench
-from run_algo_tuner import runAlgoBench
-from compute_confs import computePSNRBenchSwings, computeBenchSwings
-from validation import runPromiseBenchValidation2, runBenchValidation, runAlgoBenchValidate  
-from profiling import startProfile, stopProfile, dumpProfiles  
-from utils import createResultDirs
-from benchmarks import batch_id
-  
-
-  
-  
-def runTunerValidation():
-
-  runBenchValidation(bench_tuner_data["mobilenet_shallow"])
- 
-  #runBenchValidation("mobilenet_cifar10")
- 
-  #runBenchValidation("alexnet_cifar10")
-  #runBenchValidation("vgg16_cifar10")
-  #runBenchValidation("alexnet2_cifar10")
-  #runBenchValidation("resnet18_cifar10") 
-  #runBenchValidation("vgg16_cifar100")
-  
-
-def computeLayerSwings():
-
-
-  computeBenchSwings(bench_tuner_data["mobilenet_shallow"])
-
-  #computeBenchSwings("mobilenet_cifar10")
-
-  #computeBenchSwings("mobilenet_cifar10")
-
-  #computeBenchSwings("lenet_keras")
-  #computeBenchSwings("alexnet_cifar10")
-  #computeBenchSwings("alexnet2_cifar10")
-  #computePSNRBenchSwings("pipeline_GEOM")
-  #computePSNRBenchSwings("pipeline_GEMO")
-  #computePSNRBenchSwings("pipeline_GEO")
-  #computePSNRBenchSwings("pipeline_GSM")
-  #computePSNRBenchSwings("pipeline_GSME")
-
-  
-
-
-  
-def runPromiseTuner():
-
-  
-  start = startProfile("MobileNet")  
-  runPromiseBench(bench_tuner_data["mobilenet_cifar10"])
-  stopProfile("MobileNet", start)
-  
-  start = startProfile("Alexnet")  
-  runPromiseBench(bench_tuner_data["alexnet_cifar10"])
-  stopProfile("Alexnet", start)
-
-  start = startProfile("Alexnet2")  
-  runPromiseBench(bench_tuner_data["alexnet2_cifar10"])
-  stopProfile("Alexnet2", start)  
-
-  start = startProfile("VGG16_10")  
-  runPromiseBench(bench_tuner_data["vgg16_cifar10"])
-  stopProfile("VGG16_10", start)  
-
-  start = startProfile("VGG16_100")  
-  runPromiseBench(bench_tuner_data["vgg16_cifar100"])
-  stopProfile("VGG16_100", start)
-
-  start = startProfile("ResNet")  
-  runPromiseBench(bench_tuner_data["resnet18_cifar10"])
-  stopProfile("ResNet", start)  
-
-  start = startProfile("MobileNet-SH")  
-  runPromiseBench(bench_tuner_data["mobilenet_shallow"])
-  stopProfile("MobileNet-SH", start)  
-  
-  start = startProfile("LeNet")  
-  runPromiseBench(bench_tuner_data["lenet_keras"])
-  stopProfile("LeNet", start)
-  
-
-
-  #runPSNRPromiseBench("pipeline_GEOM")
-  #runPSNRPromiseBench("pipeline_GEMO")
-  #runPSNRPromiseBench("pipeline_GEO")
-  #runPSNRPromiseBench("pipeline_GSM")
-  #runPSNRPromiseBench("pipeline_GSME")
-
-  dumpProfiles("time_profile" + batch_id + ".txt")
-  
-
-
-  
-def runPromiseValidation():
-
-
-  start = startProfile("AlexNet")    
-  runPromiseBenchValidation2(bench_tuner_data["alexnet_cifar10"])
-  stopProfile("AlexNet", start)  
-
-  start = startProfile("AlexNet2")    
-  runPromiseBenchValidation2(bench_tuner_data["alexnet2_cifar10"])
-  stopProfile("AlexNet2", start)  
-
-  start = startProfile("VGG16_100")    
-  runPromiseBenchValidation2(bench_tuner_data["vgg16_cifar100"])
-  stopProfile("VGG16_100", start)  
-
-  start = startProfile("VGG16_10")    
-  runPromiseBenchValidation2(bench_tuner_data["vgg16_cifar10"])
-  stopProfile("VGG16_10", start)  
-  #runPromiseBenchValidation2(bench_tuner_data["lenet_keras"])
-
-  start = startProfile("ResNet")    
-  runPromiseBenchValidation2(bench_tuner_data["resnet18_cifar10"])
-  stopProfile("ResNet", start)  
-
-  start = startProfile("MobileNet_SH")  
-  runPromiseBenchValidation2(bench_tuner_data["mobilenet_shallow"])
-  stopProfile("MobileNet_SH", start)  
-
-  start = startProfile("MobileNet")    
-  runPromiseBenchValidation2(bench_tuner_data["mobilenet_cifar10"])
-  stopProfile("MobileNet", start)  
-
-  
-  dumpProfiles("validation_prof" + batch_id + ".txt")
-
-  
-  
-
-def runAutotuner(): 
-
-  runTunerBench(bench_tuner_data["alexnet_cifar10"])
-  runTunerBench(bench_tuner_data["alexnet2_cifar10"])
-
-  #runTunerBench("mobilenet_shallow")
-  #runTunerBench("mobilenet_cifar10")
-  
-  #runTunerBench("lenet_keras")
-  #runTunerBench("resnet18_cifar10")
-  #runTunerBench("vgg16_cifar10")
-
-  #runPSNRTuner("pipeline_GEOM")
-  #runPSNRTuner("pipeline_GEMO")
-  #runPSNRTuner("pipeline_GEO")
-  #runPSNRTuner("pipeline_GSM")
-  #runPSNRTuner("pipeline_GSME")
-
-
-
-
-def runSensAnalysis():
- 
-  start = startProfile("LeNet")  
-  test_sensitivity4(bench_tuner_data["lenet_keras"])
-  stopProfile("LeNet", start)  
-
-  """
-  start = startProfile("AlexNet")  
-  test_sensitivity4(bench_tuner_data["alexnet_cifar10"])
-  stopProfile("AlexNet", start)  
-
-  start = startProfile("AlexNet2")  
-  test_sensitivity4(bench_tuner_data["alexnet2_cifar10"])
-  stopProfile("AlexNet2", start)  
-
-  start = startProfile("ResNet")  
-  test_sensitivity4(bench_tuner_data["resnet18_cifar10"])
-  stopProfile("ResNet", start)  
-
-  start = startProfile("MobileNet")  
-  test_sensitivity4(bench_tuner_data["mobilenet_cifar10"])
-  stopProfile("MobileNet", start)  
-
-  start = startProfile("MobileNet_SH")  
-  test_sensitivity4(bench_tuner_data["mobilenet_shallow"])
-  stopProfile("MobileNet_SH", start)  
-
-  start = startProfile("VGG_10")  
-  test_sensitivity4(bench_tuner_data["vgg16_cifar10"])
-  stopProfile("VGG16_10", start)  
-
-  start = startProfile("VGG_100")  
-  test_sensitivity4(bench_tuner_data["vgg16_cifar100"]) 
-  stopProfile("VGG16_100", start)  
-
-  dumpProfiles("sens_time_prof.txt")
-
-  """
-  
-  start = startProfile("LeNet")  
-  test_sensitivity3(bench_tuner_data["lenet_keras"])
-  stopProfile("LeNet", start)  
-
-  start = startProfile("AlexNet")  
-  test_sensitivity3(bench_tuner_data["alexnet_cifar10"])
-  stopProfile("AlexNet", start)  
-
-  start = startProfile("AlexNet2")  
-  test_sensitivity3(bench_tuner_data["alexnet2_cifar10"])
-  stopProfile("AlexNet2", start)  
-
-  start = startProfile("ResNet")  
-  test_sensitivity3(bench_tuner_data["resnet18_cifar10"])
-  stopProfile("ResNet", start)  
-
-
-  start = startProfile("MobileNet")  
-  test_sensitivity3(bench_tuner_data["mobilenet_cifar10"])
-  stopProfile("MobileNet", start)  
-
-  start = startProfile("MobileNet_SH")  
-  test_sensitivity3(bench_tuner_data["mobilenet_shallow"])
-  stopProfile("MobileNet_SH", start)  
-
-  start = startProfile("VGG_10")  
-  test_sensitivity3(bench_tuner_data["vgg16_cifar10"])
-  stopProfile("VGG16_10", start)  
-
-  start = startProfile("VGG_100")  
-  test_sensitivity3(bench_tuner_data["vgg16_cifar100"]) 
-  stopProfile("VGG16_100", start)  
-
-  dumpProfiles("sens_time_prof.txt")
-
-  
-  """
-  test_sensitivity2(bench_tuner_data["fc4"]) 
-  test_sensitivity2(bench_tuner_data["lenet_keras"]) 
-  test_sensitivity2(bench_tuner_data["mobilenet_cifar10"]) 
-  test_sensitivity2(bench_tuner_data["mobilenet_shallow"]) 
-  test_sensitivity2(bench_tuner_data["resnet18_cifar10"]) 
-  test_sensitivity2(bench_tuner_data["alexnet_cifar10"]) 
-  test_sensitivity2(bench_tuner_data["alexnet2_cifar10"]) 
-  test_sensitivity2(bench_tuner_data["vgg16_cifar10"]) 
-  test_sensitivity2(bench_tuner_data["vgg16_cifar100"]) 
-
-
-  test_sensitivity(bench_tuner_data["fc4"]) 
-  test_sensitivity(bench_tuner_data["lenet_keras"]) 
-  test_sensitivity(bench_tuner_data["mobilenet_cifar10"]) 
-  test_sensitivity(bench_tuner_data["mobilenet_shallow"]) 
-  test_sensitivity(bench_tuner_data["resnet18_cifar10"]) 
-  test_sensitivity(bench_tuner_data["alexnet_cifar10"]) 
-  test_sensitivity(bench_tuner_data["alexnet2_cifar10"]) 
-  test_sensitivity(bench_tuner_data["vgg16_cifar10"]) 
-  test_sensitivity(bench_tuner_data["vgg16_cifar100"]) 
-  """
-  
-
-
-def runAlgoTuner():
-
-  Bench = bench_tuner_data["alexnet_cifar10"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["mobilenet_shallow"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["mobilenet_cifar10"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar10"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["lenet_keras"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["alexnet2_cifar10"]  
-  runAlgoBench(Bench)
-
-
-  Bench = bench_tuner_data["vgg16_cifar100"]  
-  runAlgoBench(Bench)
-
-  Bench = bench_tuner_data["resnet18_cifar10"]  
-  runAlgoBench(Bench)
-
-
-
-  
-def runAlgoTunerValidation():
-
-
-  Bench = bench_tuner_data["alexnet_cifar10"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["mobilenet_shallow"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["mobilenet_cifar10"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar10"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["lenet_keras"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["alexnet2_cifar10"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["vgg16_cifar100"]  
-  runAlgoBenchValidate(Bench)
-
-  Bench = bench_tuner_data["resnet18_cifar10"]  
-  runAlgoBenchValidate(Bench)
-
-  
-
-  
-  
-if __name__ == "__main__":
-
-  createResultDirs(bench_tuner_data)
-  
-  #runAutotuner()
-
-  #runTunerValidation()
-
-  #computeLayerSwings()
-  
-  #runPromiseTuner()    
-
-  #runAlgoTuner()
-
-  runAlgoTunerValidation()
-  
-  #runPromiseValidation()
-
-  #runSensAnalysis()
-
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_ha_tuner.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_ha_tuner.py
deleted file mode 100644
index 055d2c4c1bde6bf02e080c53101f03dc1791fd9e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_ha_tuner.py
+++ /dev/null
@@ -1,52 +0,0 @@
-
-
-
-import subprocess
-
-
-
-#, bench_name
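-# Runs the approxhpvm tuner twice: first with the ~1.7% loss budget into
-# result_dir_2, then with the ~0.85% budget into result_dir_1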
-def runTunerBench(Bench):
-
-  #Bench = bench_tuner_data[bench_name]
-
-  #FIXIT: Replace  approxhpvm_tuner2 with  approxhpvm_tuner
-  tuner_cmd = "python  ../opentuner/autotuner/approxhpvm_tuner.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.tuner_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_flags)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(Bench.error_range_2)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += Bench.result_dir_2
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.tuner_accuracy - 1.70)
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-
-  tuner_cmd = "python  ../opentuner/autotuner/approxhpvm_tuner.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.tuner_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_flags)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(Bench.error_range_1)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += Bench.result_dir_1
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.tuner_accuracy - 0.85)
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_hs_tuner.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_hs_tuner.py
deleted file mode 100644
index f1a9c8f417bafdf4084a687670074101bec3faa0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_hs_tuner.py
+++ /dev/null
@@ -1,185 +0,0 @@
-
-
-import os
-import subprocess
-from error_sensitivity import select_skip_layers
-
-
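-# Builds and runs a promise_tuner3 command for one result dir / loss budget
-# with the given skip-layer string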
-def runPromiseTunerCmd(Bench, dir_prefix, result_dir, acc_threshold, autotuner_runs, skip_layers):
-
-  tuner_cmd = "python2  ../opentuner/autotuner/promise_tuner3.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --start-range "
-  tuner_cmd += str(Bench.start_promise_range)
-  tuner_cmd += " --error-range "
-  #tuner_cmd += str(10)
-  # NOTE: Increasing flags from ApproxTechniqueTuner
-  tuner_cmd += str(12) 
-  tuner_cmd += " --result-dir "
-  tuner_cmd += dir_prefix
-  tuner_cmd += result_dir + "/promise_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy - acc_threshold)
-  tuner_cmd += " --layer-file "
-  tuner_cmd += dir_prefix
-  tuner_cmd += Bench.tensor_desc_file
-  # NOTE: Cost file is new addition - ***NOT*** present in promisetuner1 and promisetuner2
-  tuner_cmd += " --cost-file "
-  tuner_cmd += Bench.cost_file
-  #tuner_cmd += " --gpu-layers "
-  #tuner_cmd += str(Bench.skip_layers)
-  tuner_cmd += " --gpu-layers 0 "
-  tuner_cmd += " --skip-layers \""
-  #tuner_cmd += str(Bench.skip_layer_str) + "\""
-  tuner_cmd += str(skip_layers) + "\""
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-  
-
-
-
-def promiseTunerLoss1(Bench, dir_prefix):
-
-  tuner_runs = Bench.autotuner_runs 
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 30)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 50)
-
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers3)
-
-
-def promiseTunerLoss2(Bench, dir_prefix):
-
-  tuner_runs = Bench.autotuner_runs 
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 20)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 40)
-
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers3)
-
-
-  
-def promiseTunerLoss3(Bench, dir_prefix):
-
-  tuner_runs = Bench.autotuner_runs 
-  
-  skip_layers1 = "0"
-  skip_layers2 = "0_" + select_skip_layers(Bench, 10)
-  skip_layers3 = "0_" + select_skip_layers(Bench, 30)
-  
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers1)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers2)
-  runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3,  2.5, tuner_runs, skip_layers3)
-  
-
-BASELINE = True
-
-  
-def runPromiseBench(Bench):
-
-  # NOTE: result paths are prefixed with the build_tuner directory, which
-  # holds the promise binaries
-  dir_prefix = "../build_tuner/"
-  
-
-  if BASELINE:
-    tuner_runs = Bench.autotuner_runs * 2
-    skip_layers = "0"
-    runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs, skip_layers)
-    runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs, skip_layers)
-    runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3, 2.5, tuner_runs, skip_layers)
-    
-  else:
-    
-    promiseTunerLoss1(Bench, dir_prefix)
-
-    promiseTunerLoss2(Bench, dir_prefix)
-
-    promiseTunerLoss3(Bench, dir_prefix)
-
-  
-  
-  
-  """  
-  #tuner_cmd = "python  ../opentuner/autotuner/promise_tuner2.py "
-  tuner_cmd = "python  ../opentuner/autotuner/promise_tuner3.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --start-range "
-  tuner_cmd += str(Bench.start_promise_range)
-  tuner_cmd += " --error-range "
-  #tuner_cmd += str(10)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.result_dir_2 + "/promise_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy - 1.90)
-  tuner_cmd += " --layer-file "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.tensor_desc_file
-  # NOTE: Cost file is new addition - ***NOT*** present in promisetuner1 and promisetuner2
-  tuner_cmd += " --cost-file "
-  tuner_cmd += Bench.cost_file
-  #tuner_cmd += " --gpu-layers "
-  #tuner_cmd += str(Bench.skip_layers)
-  tuner_cmd += " --gpu-layers 0 "
-  tuner_cmd += " --skip-layers \""
-  tuner_cmd += str(Bench.skip_layer_str) + "\""
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-  #tuner_cmd = "python  ../opentuner/autotuner/promise_tuner2.py "
-  tuner_cmd = "python  ../opentuner/autotuner/promise_tuner3.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --start-range "
-  tuner_cmd += str(Bench.start_promise_range)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(10)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.result_dir_1 + "/promise_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy - 0.95)
-  tuner_cmd += " --layer-file "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.tensor_desc_file
-  tuner_cmd += " --cost-file "
-  tuner_cmd += Bench.cost_file
-  #tuner_cmd += " --gpu-layers "
-  #tuner_cmd += str(Bench.skip_layers)
-  tuner_cmd += " --gpu-layers 0 "
-  tuner_cmd += " --skip-layers \""
-  tuner_cmd += str(Bench.skip_layer_str) + "\""
-
-  
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-  """
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_psnr.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_psnr.py
deleted file mode 100644
index 77e70609b89f200e37af1a12348874f9d447d0cd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/run_psnr.py
+++ /dev/null
@@ -1,143 +0,0 @@
-
-
-import subprocess
-from benchmarks import bench_tuner_data
-
-
-def gen30dbFile():
-
-  f = open("psnr.txt", "w+");
-  f.write("30");
-  f.close()
-  
-
-def gen20dbFile():
-
-  f = open("psnr.txt", "w+");
-  f.write("20");
-  f.close()
-
-
-
-def runPSNRTuner(bench_name):
-
-  Bench = bench_tuner_data[bench_name]
-
-  # 20DB run
-  gen20dbFile()  
-  tuner_cmd = "python  ../opentuner/autotuner/approxhpvm_tuner.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.tuner_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_flags)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(Bench.error_range_2)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += Bench.result_dir_2
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.tuner_accuracy)
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-
-  # 30DB run
-  gen30dbFile()
-  tuner_cmd = "python  ../opentuner/autotuner/approxhpvm_tuner.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.tuner_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_flags)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(Bench.error_range_1)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += Bench.result_dir_1
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.tuner_accuracy)
-
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-
-
-def runPSNRPromiseBench(bench_name):
-
-  # NOTE: result paths are prefixed with the build_tuner directory, which
-  # holds the promise binaries
-  #os.chdir("../build_promise/")
-  result_dir_prefix = "../build_tuner/"
-  
-  Bench = bench_tuner_data[bench_name]
-
-  # 20db Run
-  gen20dbFile()
-  tuner_cmd = "python  ../opentuner/autotuner/promise_tuner2.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --start-range "
-  tuner_cmd += str(Bench.start_promise_range)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(10)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.result_dir_2 + "/promise_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy)
-  tuner_cmd += " --layer-file "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.tensor_desc_file
-  tuner_cmd += " --gpu-layers 0 "
-  tuner_cmd += " --skip-layers \""
-  tuner_cmd += str(Bench.skip_layer_str) + "\""
-
-  
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-  # 30DB run
-  gen30dbFile()
-  tuner_cmd = "python  ../opentuner/autotuner/promise_tuner2.py "
-  tuner_cmd += " --test-limit "
-  tuner_cmd += str(Bench.autotuner_runs)
-  tuner_cmd += " --binary ./"
-  tuner_cmd += Bench.promise_binary
-  tuner_cmd += " --num-flags "
-  tuner_cmd += str(Bench.num_layers)
-  tuner_cmd += " --start-range "
-  tuner_cmd += str(Bench.start_promise_range)
-  tuner_cmd += " --error-range "
-  tuner_cmd += str(10)
-  tuner_cmd += " --result-dir "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.result_dir_1 + "/promise_tuner/"
-  tuner_cmd += " --accuracy "
-  tuner_cmd += str(Bench.promise_accuracy)
-  tuner_cmd += " --layer-file "
-  tuner_cmd += result_dir_prefix
-  tuner_cmd += Bench.tensor_desc_file
-  tuner_cmd += " --gpu-layers 0 "
-  tuner_cmd += " --skip-layers \""
-  tuner_cmd += str(Bench.skip_layer_str) + "\""
-
-  
-  print (tuner_cmd)
-
-  p = subprocess.Popen(tuner_cmd, shell=True)
-  p.wait()
-
-
-
-
-  
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection.py
deleted file mode 100644
index 399143c357c618aeba1665f5f1b8ecda4097d84c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection.py
+++ /dev/null
@@ -1,304 +0,0 @@
-
-
-import os
-import warnings
-import matplotlib.pyplot as plt
-import matplotlib.cm as cm
-from matplotlib.ticker import MultipleLocator
-import numpy as np
-from scipy.signal import savgol_filter
-import math
-import struct
-
-
-
-def readDataFromText(textFile):
-    results = []
-    with open(textFile, "r") as f:
-        for line in f:
-            token = line.split("\t")
-            if (len(token) < 7):
-                continue
-            record = (token[0], float(token[1]), float(token[5]), float(token[6]))
-            results.append(record)
-    return results
-
-
-convL1bins =  [(0.985901, 1.36474), (0.852871, 1.16982), (0.422283, 0.55701), (0.259752, 0.335259), (0.216577, 0.277843), (0.185812, 0.23733), (0.148996, 0.189171), (0.100007, 0.125816), (0.0003127876261714846, 0.014511194080114365)]
-convL2bins =  [(0.995298, 1.3643), (0.861066, 1.16279), (0.426857, 0.547827), (0.262645, 0.330186), (0.218984, 0.273731), (0.187878, 0.233872), (0.150619, 0.186512), (0.10106, 0.124477), (0.00035427528200671077, 0.020199092105031013)]
-
-biasL1bins = [(0.3510325849056244, 0.49078235030174255), (0.30895063281059265, 0.4311973750591278), (0.16023841500282288, 0.22283604741096497), (0.099583700299263, 0.1381179839372635), (0.08340170979499817, 0.11503150314092636), (0.07280077040195465, 0.09948030859231949), (0.05857400223612785, 0.07965542376041412), (0.04044099152088165, 0.054193537682294846), (0.0, 0.0)]
-biasL2bins = [(0.4154910147190094, 0.5820578932762146), (0.3656001389026642, 0.5121639370918274), (0.18930286169052124, 0.2637346684932709), (0.11687946319580078, 0.16306844353675842), (0.09796475619077682, 0.13558265566825867), (0.0848352462053299, 0.11619425565004349), (0.06783176958560944, 0.09277229756116867), (0.046059850603342056, 0.062238890677690506), (0.0, 0.0)]
-
-gemmL1bins=  [(0.711203, 0.772211), (0.625894, 0.679601), (0.322665, 0.350383), (0.199646, 0.216727), (0.166556, 0.180781), (0.142945, 0.155132), (0.114662, 0.124399), (0.0771065, 0.0835984), (0.00034660729579627514, 0.008546584285795689)]
-gemmL2bins=  [(0.715208, 0.768102), (0.629411, 0.675947), (0.324433, 0.348358), (0.200659, 0.21539), (0.167381, 0.179634), (0.143637, 0.154119), (0.115197, 0.123548), (0.0774642, 0.0829647), (0.0003496285935398191, 0.009841435588896275)]
-
-
-
-def findBinByOp(op):
-    if op == 'tensorConv':
-        return convL1bins, convL2bins
-    if op == 'tensorAdd':
-        return biasL1bins, biasL2bins
-    if op == 'tensorGemm':
-        return gemmL1bins, gemmL2bins
-
-    return None, None
-
-
-def getSwing(Lx, opLxbin):
-    if opLxbin is None:
-        return 0
-    for i, (minT, maxT) in enumerate(opLxbin):
-        if Lx > minT:
-            return i
-
-    return 9
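-
-# Illustrative sketch (not from the original file): getSwing() returns the
-# index of the first bin whose lower bound the error exceeds, and falls
-# through to 9 (the most aggressive swing) when the error is below every
-# lower bound. Using the convL1bins table above:
-#
-#   getSwing(2.0,    convL1bins)   # -> 0  (2.0 > 0.985901, bin 0)
-#   getSwing(0.5,    convL1bins)   # -> 2  (bin 2 is the first with min < 0.5)
-#   getSwing(0.0001, convL1bins)   # -> 9  (below every lower bound)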
-
-
-
-def getConfiguration(L_thresholds):
-    configuration = []
-    for l in L_thresholds:
-        # L0 is op_type
-        opL1bin, opL2bin = findBinByOp(l[0])
-        # NOTE: L2 is L1 error, L3 is L2 error
-        sL1 = getSwing(l[2], opL1bin)
-        sL2 = getSwing(l[3], opL2bin)
-        if sL1 < 7:
-            sL1 = sL1 + 1
-        if sL2 < 7:
-            sL2 = sL2 + 1
-        configuration.append((l[0], l[1], l[2], l[3], sL1, sL2, max(sL1, sL2)))
-
-    return configuration
-
-
-def displayConfig(config):
-    for c in config:
-        print(c)
-
-def displayMultipleConfigurations(configurations):
-    for f, c in configurations.items():
-        print(f)
-        displayConfig(c)
-        print()
-
-def getConfigFromFile(filename):
-    L_requirements = readDataFromText(filename)
-    config = getConfiguration(L_requirements)
-    return config
-
-
-def getConfigurationsFromDir(dirname):
-    configurations = dict()
-    for f in os.listdir(dirname):
-        configurations[f] = getConfigFromFile(os.path.join(dirname, f))
-
-    return configurations
-              
-
-def getLayerWiseTarget(config):
-    target = []
-    for i, op in enumerate(config):
-        if (op[0] == 'tensorGemm') or (op[0] == 'tensorConv'):
-            t = op[6]
-            for j in range(i+1, len(config)):
-                if config[j][0] == 'tensorGemm' or config[j][0] == 'tensorConv':
-                    break
-                t = max(t, config[j][6])
-            target.append(t)
-            t = 0
-
-    return target
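-
-# Worked example with hypothetical swing values (field 6 of each tuple):
-# a conv/gemm op anchors a layer, and its target is the max over that op and
-# the non-conv/gemm ops that follow it, so
-#   [('tensorConv', ..., 3), ('tensorAdd', ..., 5), ('tensorGemm', ..., 2)]
-# yields [5, 2] -- max(3, 5) for the conv layer, then 2 for the gemm layer.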
-
-
-def dumpLayerWiseTarget(file, targets):
-    with open(file, "w") as f:
-        for name, t in targets.items():
-            f.write(name)
-            f.write(" ")
-            for i in t:
-                f.write(str(i))
-                f.write(" ")
-            f.write("\n")
-
-
-def getTargetsFromConfigurations(configs):
-    targets = dict()
-    for f, c in configs.items():
-        targets[f] = [d[6] for d in c]
-
-    return targets
-                
-
-def dumpBenchmarkTargets(name, benchmark_dir):
-    benchmark_targets = dict()
-    error = ['linear', 'log', 'quad']
-    for e in error:
-        results_dir = os.path.join(benchmark_dir, e)
-        configs = getConfigurationsFromDir(results_dir)
-        benchmark_targets[e] = getTargetsFromConfigurations(configs)
-
-    return benchmark_targets
-
-
-
-def dumpTargets(filename, targets):
-    with open(filename, "w") as f:
-        for e, file_configs in targets.items():
-            for name, config in file_configs.items():
-                for c in config:
-                    f.write(str(c))
-                    f.write(" ")
-                f.write("\n")
-
-
-                
-def getLayerSwings(layer_desc, configurations):
-
-    layer_swings = []
-    for i in range(len(configurations)):
-      config_vals = configurations[i]
-      if len(config_vals) == 0:
-        continue
-      
-      layer_index = 0
-      index = 0
-      swing_vals = []
-                   
-      while layer_index < len(layer_desc):
-        if len(layer_desc[layer_index]) == 1:
-          promise_swing = config_vals[index]
-          layer_type = layer_desc[layer_index][0]
-          layer_type = layer_type.strip()
-          print ("****layer_type = ", layer_type)
-          # Any non-conv/dense layer (pooling, activation, depthwise_conv, ...)
-          # stays out of PROMISE
-          if layer_type != "conv" and layer_type != "dense":
-            promise_swing = -9
-          index += 1
-        else:
-          #print ("index = ", index)
-          # FIXIT: Doesn't look right
-          print (config_vals[index], config_vals[index+1])
-          promise_swing = max(config_vals[index], config_vals[index+1])                  
-          stride = len(layer_desc[layer_index])
-          index += stride
-          
-        swing_vals.append(promise_swing)
-        layer_index += 1  
-        
-      layer_swings.append(swing_vals)
-
-    return layer_swings
-
-                   
-                
-
-def loadLayerDesc(layer_desc_file):
-
-    layer_desc = []
-    # Use a context manager so the file handle is closed promptly
-    with open(layer_desc_file) as f:
-      for x in f:
-        vals = x.split()
-        layer_desc.append(vals)
-
-    return layer_desc
-      
-
-
-def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
-
-    layer_desc = loadLayerDesc(layer_desc_file)
-    print (layer_desc)
-
-    file_names = []
-    configurations = []
-    for e, file_configs in targets.items():
-      for name, config in file_configs.items():
-        config_vals = []  
-        for c in config:
-          config_vals.append(c)         
-        print (config_vals)
-
-        configurations.append(config_vals)
-
-        rank = e + "_" +  "_".join(name.split("_")[-2:])
-        file_names.append(rank)
-        
-        
-    # NOTE: get PROMISE swing values corresponding to each layer
-    layer_swings = getLayerSwings(layer_desc, configurations)
-
-    targets_file_path = tuned_result_dir + "/layer_targets.txt"
-    f = open(targets_file_path, "w+")
-
-    for config in layer_swings:
-      index = 0
-      for swing in config:
-        swing_str = ""
-        if swing == 8 or swing == 9:
-          layer_size = len(layer_desc[index])
-          for i in range(layer_size):
-            swing_str += str(swing)
-            if i < layer_size - 1:
-              swing_str += " "
-        elif swing == -9:
-          swing_str += "8"                   
-        else:
-          swing_str += str(swing)
-
-        if index < len(config) - 1:
-          swing_str += ","    
-          
-        f.write(swing_str)
-        index += 1
-        
-      f.write("\n")
-        
-    f.close()
-    
-    print(layer_swings)    
-    return layer_swings, file_names
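-
-# Serialization note (illustrative): in layer_targets.txt a swing of 8 or 9 is
-# replicated once per op in the layer ("9 9 9" for a three-op layer), a swing
-# of -9 (a non-conv/dense layer) is written as "8", anything else is written
-# as-is, and layers are comma-separated. A three-op conv layer at swing 9
-# followed by a pooling layer at swing -9 thus serializes to "9 9 9,8".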
-
-
-
-def replaceFirstLayer(layer_swings):
-
-  # Ensuring first conv on GPU
-  for conf in layer_swings:
-    conf[0] = 9
-    
-    
-    
-def computeLayerTargets(tuned_result_dir, layer_desc_file):
-
-    targets_file_path = tuned_result_dir + "/tensor_targets.txt"
-    targets = dumpBenchmarkTargets(targets_file_path, tuned_result_dir)
-
-    dumpTargets(targets_file_path, targets)
-    
-    layer_swings, file_names = dumpLayerTargets(targets, tuned_result_dir, layer_desc_file)
-
-    replaceFirstLayer(layer_swings)
-    
-    return layer_swings, file_names
-    
-
-# Externally-called function    
-def compute_swing_selection(tuned_result_dir, layer_file):
-   
-    return computeLayerTargets(tuned_result_dir, layer_file)
-
-                            
-        
-                
-if __name__ == "__main__":
-
-    tuned_result_dir = "./vgg16_cifar10_tuner_1/high_confidence/"
-    layer_file = "layer_composition.txt"
-
-    tuned_result_dir = "./resnet18_cifar10_tuner_1/high_confidence/"
-    layer_file = "layer_composition2.txt"
-    computeLayerTargets(tuned_result_dir, layer_file)
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection2.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection2.py
deleted file mode 100644
index 588edad2a289a67d30c1ade15d4737556327f4fb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/swing_selection2.py
+++ /dev/null
@@ -1,289 +0,0 @@
-
-
-import os
-
-
-
-def readDataFromText(textFile):
-    results = []
-    with open(textFile, "r") as f:
-        for line in f:
-            token = line.split("\t")
-            if (len(token) < 7):
-                continue
-            record = (token[0], float(token[1]), float(token[5]), float(token[6]))
-            results.append(record)
-    return results
-
-
-convL1bins =  [(0.985901, 1.36474), (0.852871, 1.16982), (0.422283, 0.55701), (0.259752, 0.335259), (0.216577, 0.277843), (0.185812, 0.23733), (0.148996, 0.189171), (0.100007, 0.125816), (0.0003127876261714846, 0.014511194080114365)]
-convL2bins =  [(0.995298, 1.3643), (0.18, 0.19), (0.14, 0.16), (0.11, 0.12), (0.08, 0.09), (0.06, 0.07), (0.04, 0.05), (0.029, 0.035), (0.00031427528200671077, 0.020199092105031013)]
-#convL2bins =  [(0.995298, 1.3643), (0.18, 0.19), (0.14, 0.16), (0.11, 0.12), (0.08, 0.09), (0.06, 0.07), (0.04, 0.05),     (0.001, 0.004), (0.00031427528200671077, 0.020199092105031013)]
-
-biasL1bins = [(0.3510325849056244, 0.49078235030174255), (0.30895063281059265, 0.4311973750591278), (0.16023841500282288, 0.22283604741096497), (0.099583700299263, 0.1381179839372635), (0.08340170979499817, 0.11503150314092636), (0.07280077040195465, 0.09948030859231949), (0.05857400223612785, 0.07965542376041412), (0.04044099152088165, 0.054193537682294846), (0.0, 0.0)]
-biasL2bins = [(0.4154910147190094, 0.5820578932762146), (0.3656001389026642, 0.5121639370918274), (0.18930286169052124, 0.2637346684932709), (0.11687946319580078, 0.16306844353675842), (0.09796475619077682, 0.13558265566825867), (0.0848352462053299, 0.11619425565004349), (0.06783176958560944, 0.09277229756116867), (0.046059850603342056, 0.062238890677690506), (0.0, 0.0)]
-
-gemmL1bins=  [(0.711203, 0.772211), (0.625894, 0.679601), (0.322665, 0.350383), (0.199646, 0.216727), (0.166556, 0.180781), (0.142945, 0.155132), (0.114662, 0.124399), (0.0771065, 0.0835984), (0.00034660729579627514, 0.008546584285795689)]
-gemmL2bins=  [(0.715208, 0.768102), (0.629411, 0.675947), (0.324433, 0.348358), (0.200659, 0.21539), (0.167381, 0.179634), (0.143637, 0.154119), (0.115197, 0.123548), (0.0774642, 0.0829647), (0.0003496285935398191, 0.009841435588896275)]
-
-
-
-def findBinByOp(op):
-    if op == 'tensorConv':
-        return convL1bins, convL2bins
-    if op == 'tensorAdd':
-        return biasL1bins, biasL2bins
-    if op == 'tensorGemm':
-        return gemmL1bins, gemmL2bins
-
-    return None, None
-
-
-def getSwing(Lx, opLxbin):
-    if opLxbin is None:
-        return 0
-    for i, (minT, maxT) in enumerate(opLxbin):
-        if Lx > minT:
-            return i
-
-    return 9
-
-
-
-def getConfiguration(L_thresholds):
-    configuration = []
-    for l in L_thresholds:
-        # L0 is op_type
-        opL1bin, opL2bin = findBinByOp(l[0])
-        # NOTE: L2 is L1 error, L3 is L2 error
-        # only using L2 for image pipelines
-        sL2 = getSwing(l[3], opL2bin)
-        if sL2 < 7:
-            sL2 = sL2 + 1
-        configuration.append((l[0], l[1], l[2], l[3], sL2, sL2, sL2))
-
-    return configuration
-
-
-def displayConfig(config):
-    for c in config:
-        print(c)
-
-def displayMultipleConfigurations(configurations):
-    for f, c in configurations.items():
-        print(f)
-        displayConfig(c)
-        print()
-
-def getConfigFromFile(filename):
-    L_requirements = readDataFromText(filename)
-    config = getConfiguration(L_requirements)
-    return config
-
-
-def getConfigurationsFromDir(dirname):
-    configurations = dict()
-    for f in os.listdir(dirname):
-        configurations[f] = getConfigFromFile(os.path.join(dirname, f))
-
-    return configurations
-              
-
-def getLayerWiseTarget(config):
-    target = []
-    for i, op in enumerate(config):
-        if (op[0] == 'tensorGemm') or (op[0] == 'tensorConv'):
-            t = op[6]
-            target.append(t)
-         
-    return target
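-
-# Note the difference from swing_selection.py: here each conv/gemm target is
-# taken directly, with no max over the trailing ops, so the hypothetical
-# config [('tensorConv', ..., 3), ('tensorAdd', ..., 5), ('tensorGemm', ..., 2)]
-# yields [3, 2] rather than [5, 2].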
-
-
-def dumpLayerWiseTarget(file, targets):
-    with open(file, "w") as f:
-        for name, t in targets.items():
-            f.write(name)
-            f.write(" ")
-            for i in t:
-                f.write(str(i))
-                f.write(" ")
-            f.write("\n")
-
-
-def getTargetsFromConfigurations(configs):
-    targets = dict()
-    for f, c in configs.items():
-        targets[f] = [d[6] for d in c]
-
-    return targets
-                
-
-def dumpBenchmarkTargets(name, benchmark_dir):
-    benchmark_targets = dict()
-    error = ['linear', 'log', 'quad']
-    for e in error:
-        results_dir = os.path.join(benchmark_dir, e)
-        configs = getConfigurationsFromDir(results_dir)
-        benchmark_targets[e] = getTargetsFromConfigurations(configs)
-
-    return benchmark_targets
-
-
-def dumpTargets(filename, targets):
-    with open(filename, "w") as f:
-        for e, file_configs in targets.items():
-            for name, config in file_configs.items():
-                for c in config:
-                    f.write(str(c))
-                    f.write(" ")
-                f.write("\n")
-
-
-                
-def getLayerSwings(layer_desc, configurations):
-
-    layer_swings = []
-    for i in range(len(configurations)):
-      config_vals = configurations[i]   
-      layer_index = 0
-      index = 0
-      swing_vals = []
-                   
-      while layer_index < len(layer_desc):
-        if len(layer_desc[layer_index]) == 1:
-          promise_swing = config_vals[index]
-          # layer_desc rows are token lists; compare against the first token,
-          # otherwise the string comparisons below can never match
-          layer_type = layer_desc[layer_index][0].strip()
-          if layer_type != "conv" and layer_type != "dense":
-            promise_swing = -9
-          index += 1
-        else:
-          print (config_vals[index], config_vals[index+1])
-          promise_swing = max(config_vals[index], config_vals[index+1])                  
-          stride = len(layer_desc[layer_index])
-          #print ("*stride = ", stride)
-          index += stride
-          
-        swing_vals.append(promise_swing)
-        layer_index += 1  
-        
-      layer_swings.append(swing_vals)
-
-    return layer_swings
-
-                   
-                
-
-def loadLayerDesc(layer_desc_file):
-
-    layer_desc = []
-    # Use a context manager so the file handle is closed promptly
-    with open(layer_desc_file) as f:
-      for x in f:
-        vals = x.split()
-        layer_desc.append(vals)
-
-    return layer_desc
-      
-
-
-def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
-
-    layer_desc = loadLayerDesc(layer_desc_file)
-    print (layer_desc)
-
-    file_names = []
-    configurations = []
-    for e, file_configs in targets.items():
-      for name, config in file_configs.items():
-        config_vals = []  
-        for c in config:
-          config_vals.append(c)         
-        print (config_vals)
-
-        configurations.append(config_vals)
-
-        rank = e + "_" +  "_".join(name.split("_")[-2:])
-        file_names.append(rank)
-        
-        
-    # NOTE: get PROMISE swing values corresponding to each layer
-    layer_swings = getLayerSwings(layer_desc, configurations)
-
-    targets_file_path = tuned_result_dir + "/layer_targets.txt"
-    f = open(targets_file_path, "w+")
-
-    for config in layer_swings:
-      index = 0
-      for swing in config:
-        swing_str = ""
-        if swing == 8 or swing == 9:
-          layer_size = len(layer_desc[index])
-          for i in range(layer_size):
-            swing_str += str(swing)
-            if i < layer_size - 1:
-              swing_str += " "
-        elif swing == -9:
-          swing_str += "8"                   
-        else:
-          swing_str += str(swing)
-
-        if index < len(config) - 1:
-          swing_str += ","    
-          
-        f.write(swing_str)
-        index += 1
-        
-      f.write("\n")
-        
-    f.close()
-    
-    print(layer_swings)    
-    return layer_swings, file_names
-
-
-
-def replaceFirstLayer(layer_swings):
-
-  # Ensuring first conv on GPU
-  for conf in layer_swings:
-    conf[0] = 9
-    
-    
-    
-def computeLayerTargets(tuned_result_dir, layer_desc_file):
-
-    targets_file_path = tuned_result_dir + "/tensor_targets.txt"
-    targets = dumpBenchmarkTargets(targets_file_path, tuned_result_dir)
-
-    dumpTargets(targets_file_path, targets)
-    
-    layer_swings, file_names = dumpLayerTargets(targets, tuned_result_dir, layer_desc_file)
-
-    replaceFirstLayer(layer_swings)
-    
-    return layer_swings, file_names
-    
-
-# Externally-called function    
-def compute_swing_selection2(tuned_result_dir, layer_file):
-   
-    return computeLayerTargets(tuned_result_dir, layer_file)
-
-                            
-        
-                
-if __name__ == "__main__":
-
-    tuned_result_dir = "./vgg16_cifar10_tuner_1/high_confidence/"
-    layer_file = "layer_composition.txt"
-
-    tuned_result_dir = "./resnet18_cifar10_tuner_1/high_confidence/"
-    layer_file = "layer_composition2.txt"
-    computeLayerTargets(tuned_result_dir, layer_file)
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/utils.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/utils.py
deleted file mode 100644
index 8af0341070ccdaae24fdf537c6d35da27bfa7e5e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/utils.py
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-import os
-
-
-def createResultDirs(benchmarks):
-
-  for bench_name in benchmarks:
-    Bench = benchmarks[bench_name]
-
-    try:
-      print (Bench.result_dir_1)
-      os.mkdir(Bench.result_dir_1)
-      os.mkdir(Bench.result_dir_2)
-      os.mkdir(Bench.result_dir_3)
-    except OSError:
-      # Catch only mkdir failures (e.g. directory already exists) instead of
-      # a bare except, which would also swallow KeyboardInterrupt
-      print ("!ERROR: Could NOT create result directory")
-      continue
-
-    
diff --git a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/validation.py b/llvm/projects/hpvm-tensor-rt/bin/tuner_src/validation.py
deleted file mode 100644
index 8099bd41e63ebf407fafd7a16cb38e563db37d67..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/bin/tuner_src/validation.py
+++ /dev/null
@@ -1,740 +0,0 @@
-
-
-import os
-import sys
-import subprocess
-import shutil
-from compute_confs import computePSNRBenchSwings, computeBenchSwings
-from buildRtConfig import loadConfigData
-# loadLayerDesc is used below but was never imported; assuming it comes from
-# swing_selection, where it is defined
-from swing_selection import loadLayerDesc
-
-
-
-def getLayerString(layer_swings):
-
-  index = 0
-  layer_string = ""
-  for swing in layer_swings:
-    layer_string += str(swing)
-    if index < len(layer_swings) - 1:
-      layer_string += ","
-    # index was never incremented, which appended a comma after every element
-    # (including the last); advance it each iteration
-    index += 1
-  return layer_string
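-
-# Example (illustrative): getLayerString([8, 9, 7]) returns "8,9,7"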
-
-
-
-def testValidationRun(Bench, validation_dir, layer_swings, threshold, rank_str):
-
-  #### FIXME
-  #os.chdir("../build_promise/")
-
-  validation_acc = Bench.validation_accuracy
-  target_acc = validation_acc - threshold
-
-  validation_binary = Bench.validation_binary
-
-  # Write to promise_flags
-  fout = open("promise_flags", "w+")
-  for swing in layer_swings:
-    int_swing = int(swing)
-    if int_swing > 0:
-      fout.write(str(swing) + "\n")
-  fout.close()
-  
-  # Execute Validation Run
-  p = subprocess.Popen("./" + validation_binary, shell=True)
-  p.wait()
-
-  f = open("run_accuracies.txt")
-  index = 0.0
-  unsuccessful = 0.0
-  sum_acc = 0.0
-  for x in f:
-    x = x.strip()
-    acc = float(x)
-    if acc < target_acc:
-      unsuccessful += 1
-    index += 1
-    sum_acc += acc
-
-  f.close()
-  
-  confidence = ( (index - unsuccessful) / index) * 100.0
-  print ("run_confidence = ", confidence) 
-  avg_acc = sum_acc / index
-
-  out_fname = validation_dir + validation_binary + "_" + str(avg_acc)
-  shutil.copy("run_accuracies.txt", out_fname + "_" + rank_str)
-
-  layer_string = getLayerString(layer_swings)
-  f = open(out_fname, "w")
-  f.write("config:\t" + layer_string + "\n")
-  f.write("confidence:\t" + str(confidence) + "\n")
-  f.close()
-
-  return confidence
-
-
-
-
-def testPromiseRun(Bench, layer_swings, threshold):
-
-  #### FIXME
-  #os.chdir("../build_promise/")
-
-  validation_acc = Bench.validation_accuracy
-  target_acc = validation_acc - threshold
-
-  validation_binary = Bench.validation_binary
-
-  # Write to promise_flags
-  fout = open("promise_flags", "w+")
-  for swing in layer_swings:
-    int_swing = int(swing)
-    if int_swing > 0:
-      fout.write(str(swing) + "\n")
-  fout.close()
-  
-  # Execute Validation Run
-  p = subprocess.Popen("./" + validation_binary, shell=True)
-  p.wait()
-
-  f = open("run_accuracies.txt")
-  index = 0.0
-  unsuccessful = 0.0
-  sum_acc = 0.0
-  for x in f:
-    x = x.strip()
-    acc = float(x)
-    if acc < target_acc:
-      unsuccessful += 1
-    index += 1
-    sum_acc += acc
-
-  f.close()
-  
-  confidence = ( (index - unsuccessful) / index) * 100.0
-  print ("run_confidence = ", confidence) 
-  avg_acc = sum_acc / index
-
-  return confidence
-
-
-  
-  
-
-def dumpConfigConfidence(configs, confidence_list,
-                         result_dir, layer_desc_file):
-
-    #### FIXME
-    #os.chdir("../build_tuner/")
-
-    layer_desc = loadLayerDesc(layer_desc_file)
-    print (layer_desc)
-
-    f = open(result_dir + "/conf_confidences.txt", "w+")
-
-    count = 0
-    for config in configs:
-      index = 0
-      for swing in config:
-        swing_str = ""
-        if swing == 8 or swing == 9:
-          layer_size = len(layer_desc[index])
-          for i in range(layer_size):
-            swing_str += str(swing)
-            if i < layer_size - 1:
-              swing_str += " "
-        elif swing == -9:
-          swing_str += "8"                   
-        else:
-          swing_str += str(swing)
-
-        if index < len(config) - 1:
-          swing_str += ","              
-        f.write(swing_str)
-        
-        index += 1
-
-      f.write("\t" + str(confidence_list[count]))  
-      f.write("\n")
-      count +=1
-      
-    f.close()
-  
-  
-  
-
-def dumpValidatedConfigs(configs, result_dir, layer_desc_file,
-                         output_file_name):
-
-    os.chdir("../build_tuner/")
-
-    layer_desc = loadLayerDesc(layer_desc_file)
-    print (layer_desc)
-
-    f = open(result_dir + "/" + output_file_name, "w+")
-
-    for config in configs:
-      index = 0
-      for swing in config:
-        swing_str = ""
-        if swing == 8 or swing == 9:
-          layer_size = len(layer_desc[index])
-          for i in range(layer_size):
-            swing_str += str(swing)
-            if i < layer_size - 1:
-              swing_str += " "
-        elif swing == -9:
-          swing_str += "8"                   
-        else:
-          swing_str += str(swing)
-
-        if index < len(config) - 1:
-          swing_str += ","              
-        f.write(swing_str)
-        
-        index += 1      
-      f.write("\n")
-    f.close()
-  
-
-
-def dumpRankings(validated_ranks, result_dir, rank_file):
-
-    os.chdir("../build_tuner/")
-    f = open(result_dir + "/" + rank_file, "w+")
-    for rank in validated_ranks:
-      f.write(rank + "\n")
-
-    f.close()
-  
- 
-
-
-def replaceFP32Configs(loss_confs1, loss_confs2):
-  
-  for swing_conf in loss_confs1:
-    for i in range(0, len(swing_conf)):
-      if swing_conf[i] == 9:
-        swing_conf[i] = 8
-      if i == len(swing_conf) - 1:
-        swing_conf[i] = 7
-
-  for swing_conf in loss_confs2:
-    for i in range(0, len(swing_conf)):
-      if swing_conf[i] == 9:
-        swing_conf[i] = 8
-      if i == len(swing_conf) - 1:     
-        swing_conf[i] = 7
-  
-
-  return loss_confs1, loss_confs2      
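-
-# Worked example (hypothetical configs): a swing of 9 (FP32) is demoted to 8
-# (FP16) and the last layer is then forced to 7 regardless, so
-# replaceFP32Configs([[9, 5, 9]], [[9, 9]]) rewrites the lists in place to
-# [[8, 5, 7]] and [[8, 7]].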
-
-
-
-def replaceGPUConfigs(Bench, loss_confs1, loss_confs2):
-
-  skip_layer_str = Bench.skip_layer_str
-  layer_ids = skip_layer_str.split("_")
-  skip_layers = []
-  for layer_id in layer_ids:
-    skip_layers.append(int(layer_id))
-    
-  
-  for swing_conf in loss_confs1:
-    for i in range(0, len(swing_conf)):
-      if i in skip_layers and swing_conf[i] < 8:
-        swing_conf[i] = 8
-   
-  for swing_conf in loss_confs2:
-    for i in range(0, len(swing_conf)):
-      if i in skip_layers and swing_conf[i] < 8:
-        swing_conf[i] = 8
-   
-
-  return loss_confs1, loss_confs2      
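-
-# Worked example (hypothetical): with Bench.skip_layer_str = "0_2", layers 0
-# and 2 are pinned to the GPU, so any swing below 8 at those indices is raised
-# to 8 and [[3, 4, 5]] becomes [[8, 4, 8]].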
-
-
-
-
-def runBenchValidation(Bench):
-
-  #Bench = bench_tuner_data[bench_name]
-
-  loss_confs, conf_ranks = computeBenchSwings(Bench)
-  loss1_confs = loss_confs[0]
-  loss2_confs = loss_confs[1]
-  conf_ranks1 = conf_ranks[0]
-  conf_ranks2 = conf_ranks[1]
-
-  #loss1_confs, loss2_confs = replaceFP32Configs(loss1_confs, loss2_confs)
-  
-
-  validation_dir_1 = "../build_tuner/" + Bench.result_dir_1 + "/validation_runs/"
-  if not os.path.exists(validation_dir_1):
-    os.mkdir(validation_dir_1)
-    
-  validation_dir_2 = "../build_tuner/" +  Bench.result_dir_2 + "/validation_runs/"
-  if not os.path.exists(validation_dir_2):
-    os.mkdir(validation_dir_2)
-
-
-  ind = 0
-  validated_confs1 = []
-  validated_ranks1 = []
-  failed_confs1 = []
-  confidences1 = []
-  for layer_swings in loss1_confs:
-    print ("len(layer_Swings)  = ", len(layer_swings), "\n")
-    confidence = testValidationRun(Bench, validation_dir_1,
-                                   layer_swings, 1.0, conf_ranks1[ind])
-    if confidence >= 95:
-      validated_confs1.append(layer_swings)
-      confidences1.append(confidence)
-      validated_ranks1.append(conf_ranks1[ind])
-    else:
-      failed_confs1.append(layer_swings)
-    ind += 1
-    
-
-  ind = 0
-  validated_confs2 = []
-  validated_ranks2 = []
-  failed_confs2 = []
-  confidences2 = []
-  for layer_swings in loss2_confs:
-    confidence = testValidationRun(Bench, validation_dir_2, layer_swings, 2.0, conf_ranks2[ind])
-    if confidence >= 92:
-      validated_confs2.append(layer_swings)
-      confidences2.append(confidence)
-      validated_ranks2.append(conf_ranks2[ind])
-    else:
-      failed_confs2.append(layer_swings)
-    ind += 1  
-
-  dumpValidatedConfigs(validated_confs1, Bench.result_dir_1,
-                       Bench.layer_file, "validated_confs.txt")                      
-  dumpValidatedConfigs(validated_confs2, Bench.result_dir_2,
-                       Bench.layer_file, "validated_confs.txt")
-
-  dumpValidatedConfigs(failed_confs1, Bench.result_dir_1,
-                       Bench.layer_file, "failed_confs.txt")
-  dumpValidatedConfigs(failed_confs2, Bench.result_dir_2,
-                       Bench.layer_file, "failed_confs.txt")
-
-  dumpRankings(validated_ranks1, Bench.result_dir_1, "validated_ranks.txt")
-  dumpRankings(validated_ranks2, Bench.result_dir_2, "validated_ranks.txt")
-
-  dumpConfigConfidence(validated_confs1, confidences1,
-                       Bench.result_dir_1, Bench.layer_file)
-
-  dumpConfigConfidence(validated_confs2, confidences2,
-                       Bench.result_dir_2, Bench.layer_file)
-
-  
-  print (validated_confs1)  
-  print (validated_confs2)
-
-
-
-def readPromiseResults(loss1_file, loss2_file):
-
-  loss_confs = []
-  loss1_confs = []
-  f1 = open(loss1_file)
-  for x in f1:
-    print (x)
-    swing_toks = x.split(",")
-    swing_list = []
-    for swing_str in swing_toks:    
-      swing_val = int(swing_str.split(" ")[0])
-      swing_list.append(swing_val)
-    loss1_confs.append(swing_list)  
-
-  loss_confs.append(loss1_confs)
-    
-  loss2_confs = []
-  f2 = open(loss2_file)
-  for x in f2:
-    swing_toks = x.split(",")
-    swing_list = []
-    for swing_str in swing_toks:    
-      swing_val = int(swing_str.split(" ")[0])
-      swing_list.append(swing_val)
-    loss2_confs.append(swing_list)  
-    
-  loss_confs.append(loss2_confs)
-
-  return loss_confs
-
-
-
-
-
-
-def readPromiseResults2(loss1_file, loss2_file, layer_file):
-
-  layer_desc = loadLayerDesc(layer_file)
-  
-  loss_confs = []
-  loss1_confs = []
-  f1 = open(loss1_file)
-  for x in f1:
-    print (x)
-    swing_toks = x.split(",")
-    swing_list = []
-
-    it = 0
-    for swing_str in swing_toks:    
-      swing_val = int(swing_str.split(" ")[0])
-      if "conv" in layer_desc[it] or "dense" in layer_desc[it]:
-        swing_list.append(swing_val)
- 
-      it += 1
-      
-    loss1_confs.append(swing_list)  
-
-  loss_confs.append(loss1_confs)
-    
-  loss2_confs = []
-  f2 = open(loss2_file)
-  for x in f2:
-    swing_toks = x.split(",")
-    swing_list = []
-
-    it = 0
-    for swing_str in swing_toks:    
-      swing_val = int(swing_str.split(" ")[0])
-      if "conv" in layer_desc[it] or "dense" in layer_desc[it]:
-        swing_list.append(swing_val)
-
-      it += 1
-      
-    loss2_confs.append(swing_list)  
-    
-  loss_confs.append(loss2_confs)
-
-  return loss_confs
-
-
-
-
-
-def readPromiseResults3(result_dir):
-  
-  loss_confs = []
-  # NOTE: Second parameter is ignored
-  config_arr = loadConfigData(result_dir, 100)
-
-  for config in config_arr:
-    loss_confs.append(config.flags)
-
-  return loss_confs  
-  
-  
-
-
-
-
-
-
-def runPromiseBenchValidation(Bench):
-
-  
-  dir_prefix = "../build_tuner/"
-  #Bench = bench_tuner_data[bench_name]
-  #loss_confs = readPromiseResults(dir_prefix + Bench.loss1_result_file, dir_prefix + Bench.loss2_result_file)
-  loss_confs = readPromiseResults2(dir_prefix + Bench.loss1_result_file, dir_prefix + Bench.loss2_result_file, Bench.layer_file)
-  
-  loss1_confs = loss_confs[0]
-  loss2_confs = loss_confs[1]
- 
-  ind = 0
-  validated_confs1 = []
-  failed_confs1 = []
-  for layer_swings in loss1_confs:
-    confidence = testPromiseRun(Bench, layer_swings, 1.0)
-    if confidence >= 95:
-      validated_confs1.append(layer_swings)
-    else:
-      failed_confs1.append(layer_swings)
-    ind += 1
-    
-
-  ind = 0
-  validated_confs2 = []
-  failed_confs2 = []
-  for layer_swings in loss2_confs:
-    confidence = testPromiseRun(Bench, layer_swings, 2.0)
-    if confidence >= 95:
-      validated_confs2.append(layer_swings)
-    else:
-      failed_confs2.append(layer_swings)
-    ind += 1  
-
-
-  dumpValidatedConfigs(validated_confs1, Bench.result_dir_1,
-                       Bench.layer_file, "promise_validated_confs.txt")                      
-  dumpValidatedConfigs(validated_confs2, Bench.result_dir_2,
-                       Bench.layer_file, "promise_validated_confs.txt")
-
-  dumpValidatedConfigs(failed_confs1, Bench.result_dir_1,
-                       Bench.layer_file, "promise_failed_confs.txt")
-  dumpValidatedConfigs(failed_confs2, Bench.result_dir_2,
-                       Bench.layer_file, "promise_failed_confs.txt")
-
-
-
-
-  
-def copyValidatedConf(result_dir, validated_confs):
-
-  src_dir = result_dir + "/promise_tuner/high_confidence/"
-  dest_dir = result_dir + "/promise_tuner/validated/"
-
-  if not os.path.isdir(dest_dir):
-    os.mkdir(dest_dir)
-
-  for fname in validated_confs:
-    shutil.copy(src_dir + fname, dest_dir + fname)  
-
-  
-
-def copyFailedConf(result_dir, failed_confs):
-
-  src_dir = result_dir + "/promise_tuner/high_confidence/"
-  dest_dir = result_dir + "/promise_tuner/failed/"
-
-  if not os.path.isdir(dest_dir):
-    os.mkdir(dest_dir)
-
-  for fname in failed_confs:
-    shutil.copy(src_dir + fname, dest_dir + fname)  
-    
-  
-  
-
-def validateConfigs(Bench, result_dir, configs_arr, acc_thresh):
-
-  validated_confs = []
-  failed_confs = []
-  for conf in configs_arr:
-    layer_swings = conf.flags
-    confidence = testPromiseRun(Bench, layer_swings, acc_thresh)
-    if confidence >= 95:
-      validated_confs.append(conf.fname)
-    else:
-      failed_confs.append(conf.fname)
-
-    
-  copyValidatedConf(result_dir, validated_confs)                    
-  copyFailedConf(result_dir, failed_confs) 
-
-
-
-
-
-                 
-
-def runPromiseBenchValidation2(Bench):
-
-  
-  config_arr1 = loadConfigData(Bench.result_dir_1, 100)
-  config_arr2 = loadConfigData(Bench.result_dir_2, 100)
-  config_arr3 = loadConfigData(Bench.result_dir_3, 100)
-
-  
-  validateConfigs(Bench, Bench.result_dir_1, config_arr1, 1.0)
-  validateConfigs(Bench, Bench.result_dir_2, config_arr2, 2.0)
-  validateConfigs(Bench, Bench.result_dir_3, config_arr3, 3.0)
-  
-
-
-
-### NOTE: Algo Tuner Validation routines
-
-
-  
-
-
-def addAccuracyLoss(dest_file, accuracy_loss):
-
-  f = open(dest_file, "r")
-  file_str = ""
-  ind = 0
-  for x in f:
-    line_str = x
-    if ind == 0:
-      line_str = x.replace("\n", "")
-      line_str += "\tvalidation_loss=" + str(accuracy_loss) + "\n"
-
-    file_str += line_str
-    ind += 1    
-  f.close()
-
-  
-  f_out = open(dest_file, "w+")
-  f_out.write(file_str)
-  f_out.close()
-
-
-
-   
-def dumpValidConfigs(result_dir, validated_confs):
-
-  src_dir = result_dir + "/algo_tuner/high_confidence/"
-  dest_dir = result_dir + "/algo_tuner/validated/"
-
-  if not os.path.isdir(dest_dir):
-    os.mkdir(dest_dir)
-
-  for (fname, accuracy_loss) in validated_confs:
-    dest_file = dest_dir + fname  
-    shutil.copy(src_dir + fname, dest_file)  
-    addAccuracyLoss(dest_file, accuracy_loss)
-  
-  
-
-def dumpFailedConfigs(result_dir, failed_confs):
-
-  src_dir = result_dir + "/algo_tuner/high_confidence/"
-  dest_dir = result_dir + "/algo_tuner/failed/"
-
-  if not os.path.isdir(dest_dir):
-    os.mkdir(dest_dir)
-
-  for (fname, accuracy_loss) in failed_confs:
-    dest_file = dest_dir + fname  
-    shutil.copy(src_dir + fname, dest_file) 
-    addAccuracyLoss(dest_file, accuracy_loss)
-
-  
-  
- 
-def readAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  file_str = file.read()
-  file.close()
-
-  accuracy = 0.0
-  try:
-    accuracy = float(file_str)
-  except ValueError:
-    print ("ERROR: Reading accuracy from 'final_accuracy' file")
-    # Exit non-zero so callers can detect the failure
-    sys.exit(1)
-  
-  print ("accuracy = ", accuracy)
-  return accuracy
-
-def getBaselineConfig(num_layers):
-
-  fp32_swing = 11
-  swings = []
-  
-  for i in range(num_layers):
-    swings.append(str(fp32_swing))
-    
-  return swings
-
-
-
-def readConfidence(target_acc):
-
-  f = open("run_accuracies.txt")
-  index = 0.0
-  unsuccessful = 0.0
-  sum_acc = 0.0
-  for x in f:
-    x = x.strip()
-    acc = float(x)
-    if acc < target_acc:
-      unsuccessful += 1
-    index += 1
-    sum_acc += acc
-
-  f.close()
-  
-  confidence = ( (index - unsuccessful) / index) * 100.0
-  print ("run_confidence = ", confidence) 
-  avg_acc = sum_acc / index
-
-  return confidence, avg_acc
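-
-# Example with illustrative numbers: if run_accuracies.txt holds ten runs and
-# one falls below target_acc, confidence = ((10 - 1) / 10) * 100 = 90.0 and
-# avg_acc is the plain mean of the ten accuracies.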
-
-
-
-def invokeBinary(binary_path, layer_swings, runs, input_size, offset, target_acc): # threshold):
-
-  default_skip = 4
-  # Write to promise_flags
-  fout = open("promise_flags", "w+")
-  for swing in layer_swings:
-    int_swing = int(swing)
-    if int_swing > 0:
-      fout.write(str(swing) + "\n")
-  fout.close()
-
-  run_cmd = "./" + binary_path + " " + str(runs) + " " + str(target_acc) + " " + str(default_skip) + " " + str(input_size) + " " + str(offset)
-  # Execute Validation Run
-  #p = subprocess.Popen("./" + validation_binary, shell=True)
-
-  p = subprocess.Popen(run_cmd, shell=True)
-  p.wait()
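-
-  # Example (hypothetical binary name and values):
-  #   invokeBinary("promise_bin", ["8", "9"], 1, 2000, 8000, 83.5)
-  # writes "8\n9\n" to promise_flags and then runs:
-  #   ./promise_bin 1 83.5 4 2000 8000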
-  
-
-
-
-
-def validateAlgoConfigs(binary_path, result_dir, configs_arr, gold_acc, acc_thresh, runs):
-
-  # NOTE: Use confidence target as 95%
-  confidence_target = 95
-  # NOTE: 1 run sufficient for software approximations
-  
-  validated_confs = []
-  failed_confs = []
-
-  #validation_acc = Bench.validation_accuracy
-  target_acc = gold_acc - acc_thresh
-  
-  for conf in configs_arr:
-    layer_swings = conf.flags
-    invokeBinary(binary_path, layer_swings, runs, 2000, 8000, target_acc) 
-    confidence, avg_acc = readConfidence(target_acc)
-    
-    accuracy_loss = gold_acc - avg_acc      
-    if confidence >= confidence_target:
-      validated_confs.append((conf.fname, accuracy_loss))
-    else:
-      failed_confs.append((conf.fname, accuracy_loss))
-
-
-  dumpValidConfigs(result_dir, validated_confs)                    
-  dumpFailedConfigs(result_dir, failed_confs) 
-  
-
-
-
-def runAlgoBenchValidate(Bench):
-
-  num_layers = Bench.num_layers
-  base_conf = getBaselineConfig(num_layers)
-  # Path to binary to run
-  binary_path = Bench.promise_binary
-  # NOTE: 'target_acc' passed 0.0 since unused for baseline run
-  invokeBinary(binary_path, base_conf, 1, 2000, 8000, 0.0)
-  gold_acc = readAccuracy("final_accuracy")
-
-  
-  loss1_dir = Bench.result_dir_1
-  loss2_dir = Bench.result_dir_2
-  loss3_dir = Bench.result_dir_3
-
-  loss1_configs = loadConfigData(loss1_dir, 100)
-  loss2_configs = loadConfigData(loss2_dir, 100)
-  loss3_configs = loadConfigData(loss3_dir, 100)
-
-  runs = 1
-  validateAlgoConfigs(binary_path, loss1_dir, loss1_configs, gold_acc, 1.0, runs)
-  validateAlgoConfigs(binary_path, loss2_dir, loss2_configs, gold_acc, 2.0, runs)
-  validateAlgoConfigs(binary_path, loss3_dir, loss3_configs, gold_acc, 3.0, runs)
-  
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/CMakeLists.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/CMakeLists.txt
deleted file mode 100644
index 095e037430dbf1751dddfd047d0cf0157ad9e2e7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/CMakeLists.txt
+++ /dev/null
@@ -1,119 +0,0 @@
-cmake_minimum_required (VERSION 2.6)
-project (cudnn-training)
-
-find_package(CUDA 6.5 REQUIRED)
-
-
-if (CMAKE_BUILD_TYPE STREQUAL "Debug")
-  message("Debug mode")
-    set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS};-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_60,code=compute_60;-std=c++11;-g;-lineinfo;-Xcompiler;-ggdb;-lcurand)
-else()
-  set(CUDA_NVCC_FLAGS ${CUDA_NVCC_FLAGS};-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_60,code=compute_60;-std=c++11;-DNDEBUG;-Xcompiler;-DNDEBUG;-lcurand)
-endif()
-
-set(CUDA_PROPAGATE_HOST_FLAGS OFF)
-
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11  -I/  " )
-
-add_definitions(-DNO_INJECTION)
-add_definitions(-DPROMISE_TUNER_ENABLED)
-if(USE_GFLAGS)
-  add_definitions(-DUSE_GFLAGS)
-endif()
-
-if(USE_AUTOTUNER)
-  remove_definitions(-DNO_INJECTION)
-endif()
-
- 
-
-include_directories($ENV{CUDNN_PATH} /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/$ENV{CUDNN_PATH}/include)
-include_directories(/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/./tensor_runtime/include)
-include_directories(/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/../gpu_profiler/include)
-include_directories(/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/../soc_simulator/include)
-link_directories($ENV{CUDNN_PATH} /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/$ENV{CUDNN_PATH}/lib /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/$ENV{CUDNN_PATH}/lib64)
-
-
-cuda_add_library(tensor_runtime /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_runtime.cu)
-cuda_add_cublas_to_target(tensor_runtime)
-
-cuda_add_library(tensor_cpu_runtime /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_cpu_runtime.cc)
-
-find_library(GPU_PROFILER_LIB
-    NAMES libgpu_profiler.a
-    HINTS /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/../gpu_profiler/lib
-)
-
-find_library(SOC_SIMULATOR_LIB
-    NAMES libpromise_profiler.a
-    HINTS /home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/../soc_simulator/lib
-)
-
-
-if(USE_GFLAGS)
-  target_link_libraries(tensor_runtime gflags cudnn -lcurand)
-else()
-  target_link_libraries(tensor_runtime cudnn -lcurand)
-endif()
-
-target_link_libraries(tensor_cpu_runtime)
-
-# lenet_keras_half_autogenerated_knobs
-add_executable(lenet_keras_fp16_perf20 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf20.cc)
-target_link_libraries(lenet_keras_fp16_perf20 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf26 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf26.cc)
-target_link_libraries(lenet_keras_fp16_perf26 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf22 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf22.cc)
-target_link_libraries(lenet_keras_fp16_perf22 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf25 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf25.cc)
-target_link_libraries(lenet_keras_fp16_perf25 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf23 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf23.cc)
-target_link_libraries(lenet_keras_fp16_perf23 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp33 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp33.cc)
-target_link_libraries(lenet_keras_fp16_samp33 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf24 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf24.cc)
-target_link_libraries(lenet_keras_fp16_perf24 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp31 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp31.cc)
-target_link_libraries(lenet_keras_fp16_samp31 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf30 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf30.cc)
-target_link_libraries(lenet_keras_fp16_perf30 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp36 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp36.cc)
-target_link_libraries(lenet_keras_fp16_samp36 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf21 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf21.cc)
-target_link_libraries(lenet_keras_fp16_perf21 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp34 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp34.cc)
-target_link_libraries(lenet_keras_fp16_samp34 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp32 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp32.cc)
-target_link_libraries(lenet_keras_fp16_samp32 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_samp35 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_samp35.cc)
-target_link_libraries(lenet_keras_fp16_samp35 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf29 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf29.cc)
-target_link_libraries(lenet_keras_fp16_perf29 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf27 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf27.cc)
-target_link_libraries(lenet_keras_fp16_perf27 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-add_executable(lenet_keras_fp16_perf28 lenet_keras_half_autogenerated_knobs/lenet_keras_fp16_perf28.cc)
-target_link_libraries(lenet_keras_fp16_perf28 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
-
-# lenet_keras_autogenerated_knobs
-add_executable(lenet_keras_fp32_perf20 lenet_keras_autogenerated_knobs/lenet_keras_fp32_perf20.cc)
-target_link_libraries(lenet_keras_fp32_perf20 tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB})
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_different_clock_frequencies_testing_automator.py b/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_different_clock_frequencies_testing_automator.py
deleted file mode 100644
index d787af8ec350b7fa2f2eeb2b0ed4c3ae4c015c95..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_different_clock_frequencies_testing_automator.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Automates online benchmark testing with different clock speeds
-# Input: GPU clock speed, DDR clock speed, set of benchmark names to test
-# Set of benchmarks format: (full_bin_name, half_bin_name)
-import os
-import sys
-
-from subprocess import Popen, PIPE
-
-def set_clock_speeds(gpu_speed_mhz, ddr_speed_mhz):
-    def find_closest_clock_speed(goal_speed):
-        # Reads /sys/devices/17000000.gp10b/devfreq/17000000.gp10b/available_frequencies
-        # and finds the closest clock speed
-        AVAIL_FREQS = "/sys/devices/17000000.gp10b/devfreq/17000000.gp10b/available_frequencies"
-        avail_freqs_file = open(AVAIL_FREQS, "r")
-        avail_speeds_lst = avail_freqs_file.read().strip().split()
-        avail_freqs_file.close()
-
-        # Compare against the goal_speed parameter rather than the enclosing
-        # gpu_speed variable, so the helper works for any requested frequency
-        min_diff = abs(goal_speed - int(avail_speeds_lst[0]))
-        closest_speed = int(avail_speeds_lst[0])
-        for avail_speed in avail_speeds_lst[1:]:
-            avail_speed = int(avail_speed)
-            curr_diff = abs(goal_speed - avail_speed)
-            if curr_diff < min_diff:
-                min_diff = curr_diff
-                closest_speed = avail_speed
-        return closest_speed
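-
-    # Example (hypothetical frequencies): if available_frequencies lists
-    # 114750000, 318750000 and 522000000 Hz, find_closest_clock_speed(300000000)
-    # returns 318750000, the entry with the smallest absolute difference.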
-
-    new_conf_filename = 'jetson_clocks_conf%d_%d.txt' % (gpu_speed_mhz, ddr_speed_mhz)
-    curr_conf_filename = "jetson_clocks_conf_backup.txt"
-    if os.path.isfile(curr_conf_filename):
-        os.remove(curr_conf_filename)
-
-    # Get the current configurations in a file 
-    sudo_password = 'nvidia'
-    p = Popen(['sudo', '/home/nvidia/jetson_clocks.sh', '--store', curr_conf_filename], \
-            stdin=PIPE, universal_newlines=True)
-    p.communicate(sudo_password + '\n')
-    assert p.returncode == 0
-
-    # Read the current config file in 
-    curr_conf_file = open(curr_conf_filename, "r")
-    curr_confs = curr_conf_file.read().strip().split('\n')
-    curr_conf_file.close()
-    
-    GPU_MIN_FREQ = "/sys/devices/17000000.gp10b/devfreq/17000000.gp10b/min_freq"
-    GPU_MAX_FREQ = "/sys/devices/17000000.gp10b/devfreq/17000000.gp10b/max_freq"
-    GPU_CUR_FREQ = "/sys/devices/17000000.gp10b/devfreq/17000000.gp10b/cur_freq"
-    
-    DDR_UPDATE_PATH = "/sys/kernel/debug/bpmp/debug/clk/emc/rate"
-
-    # Copy everything in the old configuration except for the GPU/DDR lines
-    new_conf_file = open(new_conf_filename, "w")
-    for line in curr_confs:
-        # Write the GPU clock frequencies at the end to configure the clocks even if
-        # the current configuration doesn't have one of the lines
-        if line.startswith(GPU_MIN_FREQ) or line.startswith(GPU_MAX_FREQ) or \
-                line.startswith(GPU_CUR_FREQ) or line.startswith(DDR_UPDATE_PATH):
-            continue
-        else:
-            new_conf_file.write("%s\n" % line)
-
-    MHZ_TO_HZ_MULT = 1000000
-    gpu_speed = gpu_speed_mhz * MHZ_TO_HZ_MULT
-    ddr_speed = ddr_speed_mhz * MHZ_TO_HZ_MULT
-
-    # Set GPU
-    closest_gpu_speed = find_closest_clock_speed(gpu_speed)
-    print("Setting GPU speed to %d" % closest_gpu_speed)
-    new_conf_file.write("%s:%d\n" % (GPU_MIN_FREQ, closest_gpu_speed))
-    new_conf_file.write("%s:%d\n" % (GPU_MAX_FREQ, closest_gpu_speed))
-    #new_conf_file.write("%s:%d\n" % (GPU_CUR_FREQ, closest_gpu_speed))
-
-    # Set DDR
-    new_conf_file.write("%s:%d\n" % (DDR_UPDATE_PATH, ddr_speed))
-    new_conf_file.close()
-
-    # Set the new configuration
-    p = Popen(['sudo', '/home/nvidia/jetson_clocks.sh', '--restore', new_conf_filename], \
-            stdin=PIPE, universal_newlines=True)
-    p.communicate(sudo_password + '\n')
-    assert p.returncode == 0
-    print("SUCCESSFULLY SET CLOCK SPEEDS")
-
-
-def run_benchmark(bin_name, should_print_bin_output):
-    print("RUNNING %s" % bin_name)
-    proc = Popen("./%s" % bin_name, stdout = PIPE, universal_newlines = True)
-    proc_output = proc.communicate()[0]
-    assert proc.returncode == 0
-    
-    if should_print_bin_output:
-        print(proc_output)
-    print("FINISHED RUNNING %s" % bin_name)
-    return proc_output    
-
-
-def parse_binary_output(proc_output):
-    avg_time_key_ind = proc_output.find("Average time:")
-    assert avg_time_key_ind >= 0
-    avg_time = proc_output[avg_time_key_ind : proc_output.find("\n", avg_time_key_ind)]
-    print(avg_time)
-    return avg_time
-
-
-# Input: a list of tuples of benchmark names
-# Can change to input a file containing benchmarks to run 
-def run_benchmarks(benchmarks_filename, output_filename, should_print_bin_output):
-    benchmarks_file = open(benchmarks_filename, "r")
-    output_file = open(output_filename, "w")
-
-    def parse_binary_names_tuple(tuple_line):
-        tuple_line = tuple_line.replace("(", "").replace(")", "").strip().split(',')
-        return tuple_line[0].strip(), tuple_line[1].strip()
-
-    for line in benchmarks_file:
-        full_bin_name, half_bin_name = parse_binary_names_tuple(line)
-        output_file.write("%s: %s\n" % (full_bin_name, \
-                parse_binary_output(run_benchmark(full_bin_name, should_print_bin_output))))
-        output_file.write("%s: %s\n" % (half_bin_name, \
-                parse_binary_output(run_benchmark(half_bin_name, should_print_bin_output))))    
-
-    benchmarks_file.close()
-    output_file.close()
-
-
-if __name__ == "__main__":
-    num_args = len(sys.argv)
-
-    if num_args != 5 and num_args != 6:
-        print("Usage: python online_benchmark_testing_automator.py <gpu freq in MHz> <ddr freq in MHz> <binary_names_file> <output_file> [1 to print binary output]")
-        print("Binary names file format: (full_binary_name, half_binary_name)<newline>")
-        exit(1)
-    print("GPU clock speed: %s" % sys.argv[1])
-    print("DDR clock speed: %s" % sys.argv[2])
-    print("Benchmarks file name: %s" % sys.argv[3])
-    print("Output file name: %s" % sys.argv[4])
-
-    set_clock_speeds(int(sys.argv[1]), int(sys.argv[2]))
-    run_benchmarks(sys.argv[3], sys.argv[4], num_args == 6 and sys.argv[-1] == "1")
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py b/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py
deleted file mode 100644
index 197b653d3bf6983a9500badcc4766bac1274fb63..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Automates online benchmark testing with different clock speeds
-# Input: set of benchmark names to test
-# Set of benchmarks format: (full_bin_name, half_bin_name)
-import os
-import sys
-
-from collections import defaultdict
-from subprocess import Popen, PIPE
-
-def run_benchmark(bin_name): 
-    print("RUNNING %s" % bin_name)
-    proc = Popen("./%s" % bin_name, stdout = PIPE, universal_newlines = True)
-    proc_output = proc.communicate()[0]
-    assert proc.returncode == 0
-    print("FINISHED RUNNING %s" % bin_name)
-    return proc_output    
-
-
-def parse_binary_output(proc_output, per_tensor):
-    final_acc_key_ind = proc_output.find("**** Final Accuracy")
-    assert final_acc_key_ind >= 0
-    final_acc = proc_output[final_acc_key_ind : proc_output.find("\n", final_acc_key_ind)]
-    print(final_acc)
-
-    if per_tensor:
-        first_op_ind = proc_output.find("Operation ")
-        total_op_ind = proc_output.find('\n', proc_output.find("Total energy"))
-        assert first_op_ind >= 0
-        assert total_op_ind >= 0
-
-        time_energy_output = proc_output[first_op_ind : total_op_ind]
-        print(time_energy_output)
-        return time_energy_output, final_acc
-
-    else:
-        avg_time_key_ind = proc_output.find("Average time:")
-        assert avg_time_key_ind >= 0
-
-        avg_time = proc_output[avg_time_key_ind : proc_output.find("\n", avg_time_key_ind)]
-        print(avg_time)
-
-        return avg_time, final_acc
-
-
-def get_sorted_binaries(builds_dir):
-    # dict of network names to lists of binaries
-    # list of binaries should be in sorted order (can do that when we run the benchmarks)
-    network_bins = defaultdict(list)
-    for bin_name in os.listdir(builds_dir):
-        if bin_name.find("profiling") == -1:
-            continue
-        network_name = bin_name[ : bin_name.rfind("_")]
-        network_bins[network_name].append(bin_name)
-    return network_bins
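-
-# Example (hypothetical builds dir): "alexnet_cifar10_profiling_2" and
-# "alexnet_cifar10_profiling_10" both map to network "alexnet_cifar10_profiling"
-# (everything before the last "_"); run_benchmarks later sorts each list by the
-# trailing knob id, giving [..._2, ..._10].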
-
-
-# Input: a list of tuples of benchmark names
-# Can change to input a file containing benchmarks to run 
-def run_benchmarks(sorted_bins, builds_dir, output_filename, per_tensor):
-    def get_knob_id(bin_name):
-        return int(bin_name[bin_name.rfind("_") + 1 : ])
-
-    # Unbuffered text I/O (buffering=0) is Python 2-only; results are flushed
-    # to disk as each benchmark finishes
-    output_file = open(output_filename, "w", buffering = 0)
-    for network_name in sorted(sorted_bins.keys()):
-        # Sort the binaries in order by knob id
-        sorted_bins[network_name].sort(key = get_knob_id)
-        print("--------------------------------------")
-        print(network_name)
-        output_file.write("--------------------------------------\n%s\n" % network_name)
-
-        # Go through all binaries
-        for bin_name in sorted_bins[network_name]:
-            print(bin_name)
-            binary_output = run_benchmark(os.path.join(builds_dir, bin_name)) 
-            time_energy_output, final_acc = parse_binary_output(binary_output, per_tensor)
-            output_file.write("%s, %s, %s\n\n" % (bin_name, time_energy_output, final_acc))
-        print("--------------------------------------\n")
-        output_file.write("--------------------------------------\n\n")
-    output_file.close()
-
-if __name__ == "__main__":
-    num_args = len(sys.argv)
-
-    if num_args != 3 and num_args != 4:
-        print("Usage: python online_benchmark_testing_automator.py <builds dir> <outputs_file_name> [per_tensor]")
-        print("To delete autogen dirs: python online_benchmark_testing_automator.py clean")
-        exit(1)
-    print("Output file name: %s" % sys.argv[2])
-    sorted_bins = get_sorted_binaries(sys.argv[1])
-    run_benchmarks(sorted_bins, sys.argv[1], sys.argv[2], num_args == 4 and sys.argv[3] == "per_tensor")
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/cmakelists_generator.py b/llvm/projects/hpvm-tensor-rt/code_autogenerators/cmakelists_generator.py
deleted file mode 100644
index 04f6c5eec378276cd0c89fcc7013cb6996a90f2f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/cmakelists_generator.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Generates a CMakeLists.txt file for all generated files in a specific directory
-# Input: Arbitrarily long list containing names of all generated files directories
-# Ex: alexnet_cifar10_autogenerated_knobs mobilenet_cifar10_autogenerated_knobs
-# If inputted 0 parameters: Generates CMakeLists.txt file for all generated files in CURRENT dir
-
-import sys
-import os
-
-def get_all_generated_directory_names(): 
-    '''
-    Returns a list of all generated source code directories (<>_autogenerated_knobs)
-    in the current directory. Called when program is run with 0 args
-    '''
-    generated_dir_names = []
-    for dir_name in os.listdir("."):
-        print(dir_name)
-        if dir_name.endswith("autogenerated_knobs"):
-            generated_dir_names.append(dir_name)
-    return generated_dir_names
-
-
-def generate_cmakelists_setup(cmakelists_file):
-    '''
-    Copies over all the setup instructions (ex: finding libraries) from a "base" CMakeLists.txt
-    file. Ends copying when we find the first instance of add_executable
-
-    Args:
-        cmakelists_file: File object to write cmake instructions to 
-
-    Assumption: All setup instructions appear before any add_executable instructions
-    '''
-    BASE_CMAKELISTS_PATH = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt"
-    base_cmakelists_file = open(os.path.join(BASE_CMAKELISTS_PATH, "CMakeLists.txt"), "r")
-
-    find_lib_line = ""
-
-    for line in base_cmakelists_file:
-        if line.find("add_executable") != -1:
-            break
-
-        elif line.startswith("#"):
-            continue
-
-        # Special case: ignore / if -I flag exists
-        elif line.find("/") != -1 and line.find("-I") == -1: 
-            dot_dot_slash_ind = line.find("../")
-            dot_slash_ind = line.find("./")
-            if dot_dot_slash_ind != -1:
-                start_ind = dot_dot_slash_ind
-            elif dot_slash_ind != -1:
-                start_ind = dot_slash_ind
-            else:
-                slash_ind = line.find("/")
-                prev_space_ind = line[:slash_ind].rfind(" ")
-                start_ind = prev_space_ind + 1
-
-            old_rel_path = []
-            while start_ind < len(line):
-                if line[start_ind] == ")" or line[start_ind].isspace():
-                    break
-                old_rel_path.append(line[start_ind])
-                start_ind += 1
-            old_rel_path = ''.join(old_rel_path)
-            if os.path.isabs(old_rel_path):
-                cmakelists_file.write(line)
-            else:
-                new_path = os.path.join(BASE_CMAKELISTS_PATH, old_rel_path)
-                cmakelists_file.write(line.replace(old_rel_path, new_path))
-            continue
-        cmakelists_file.write(line)
-    base_cmakelists_file.close()
-
-
-def generate_cmakelists_file(cmakelists_file, source_file_dirs):
-    generate_cmakelists_setup(cmakelists_file)
-    LIBRARIES = "tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB}"
-    cmake_instrs = []
-
-    for source_file_dir in source_file_dirs:
-        cmake_instrs.append("# %s" % source_file_dir)
-        for source_file in os.listdir(source_file_dir):
-            # Executable name = name of source code file without file extension
-            file_ext_ind = source_file.find(".cc")
-            if file_ext_ind == -1:
-                print("WARNING: Found file with wrong extension. Skipping. %s" % source_file)
-                continue
-            exec_name = source_file[ : file_ext_ind]
-            
-            source_file_path = os.path.join(source_file_dir, source_file)
-            cmake_instrs.append("add_executable(%s %s)" % (exec_name, source_file_path))
-            cmake_instrs.append("target_link_libraries(%s %s)\n" % (exec_name, LIBRARIES))
-        cmake_instrs.append("\n")
-    cmakelists_file.write('\n'.join(cmake_instrs))
-
-
-if __name__ == "__main__":
-    num_args = len(sys.argv)
-
-    if num_args >= 2 and sys.argv[1] == "--usage":
-        print("python cmakelists_generator.py <names of all generated files directories>")
-        print("If given no parameters: Generates CMakeLists.txt file for all generated files in CURRENT directory")
-        exit(1)
-
-    cmakelists_file = open("CMakeLists.txt", "w")
-    if num_args == 1:
-        generate_cmakelists_file(cmakelists_file, get_all_generated_directory_names())
-    else:
-        generate_cmakelists_file(cmakelists_file, sys.argv[1:])
-    cmakelists_file.close()
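For reference, the generator above emits one add_executable/target_link_libraries pair per .cc file it finds. A minimal sketch of that per-file emission, in Python (the directory and file names below are hypothetical):

    import os

    LIBRARIES = "tensor_runtime ${GPU_PROFILER_LIB} ${SOC_SIMULATOR_LIB}"

    def emit_stanza(source_file_dir, source_file):
        # Executable name = source file name minus the .cc extension
        exec_name = source_file[: source_file.find(".cc")]
        source_path = os.path.join(source_file_dir, source_file)
        return "add_executable(%s %s)\ntarget_link_libraries(%s %s)\n" % (
            exec_name, source_path, exec_name, LIBRARIES)

    # Hypothetical generated-knob source file:
    print(emit_stanza("alexnet_cifar10_autogenerated_knobs", "alexnet_cifar10_21.cc"))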
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16.txt
deleted file mode 100644
index 563d7f4a03b3b3a50e2c08c76616a88ea7958b5a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-../dnn_sources/src/half/profiling/alexnet2_cifar10_half_profiling.cc
-../dnn_sources/src/half/profiling/alexnet_cifar10_half_profiling.cc
-../dnn_sources/src/half/profiling/mobilenet_depthwise_half_profiling.cc
-../dnn_sources/src/half/profiling/mobilenet_shallow_depthwise_half_profiling.cc
-../dnn_sources/src/half/profiling/resnet18_cifar10_half_profiling.cc
-../dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc
-../dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_first_three.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_first_three.txt
deleted file mode 100644
index 4a0beb250e2241c7523e69b5262cb9ffc977d28d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_first_three.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-../dnn_sources/src/half/profiling/alexnet2_cifar10_half_profiling.cc
-../dnn_sources/src/half/profiling/alexnet_cifar10_half_profiling.cc
-../dnn_sources/src/half/profiling/resnet18_cifar10_half_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_remainder.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_remainder.txt
deleted file mode 100644
index 20ca95abcf1ee1aab337fa391abb5f1a74583fe1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_remainder.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-../dnn_sources/src/half/profiling/mobilenet_depthwise_half_profiling.cc
-../dnn_sources/src/half/profiling/mobilenet_shallow_depthwise_half_profiling.cc
-../dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc
-../dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_sources.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_sources.txt
deleted file mode 100644
index 506497e42889dc1d8bb2465912e87f56464e7ecc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp16_sources.txt
+++ /dev/null
@@ -1 +0,0 @@
-../dnn_sources/src/half/lenet_keras_half.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32.txt
deleted file mode 100644
index 12b87930416c4269a62f2020a06b42cf5cf4dc13..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-../dnn_sources/src/profiling/alexnet2_profiling.cc
-../dnn_sources/src/profiling/alexnet_cifar10_profiling.cc
-../dnn_sources/src/profiling/mobilenet_cifar10_profiling.cc
-../dnn_sources/src/profiling/mobilenet_shallow_profiling.cc
-../dnn_sources/src/profiling/mobilenet_depthwise_profiling.cc
-../dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc
-../dnn_sources/src/profiling/resnet18_cifar10_profiling.cc
-../dnn_sources/src/profiling/vgg16_cifar100_profiling.cc
-../dnn_sources/src/profiling/vgg16_cifar10_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_sources.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_sources.txt
deleted file mode 100644
index cd8f03c30712f0162db2cc8bcf563087be05bf64..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_sources.txt
+++ /dev/null
@@ -1 +0,0 @@
-../dnn_sources/src/lenet_keras.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_test.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_test.txt
deleted file mode 100644
index a59f773cda240a311c0c873c9366494018b87312..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_fp32_test.txt
+++ /dev/null
@@ -1 +0,0 @@
-../dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_mobilenet_depth.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_mobilenet_depth.txt
deleted file mode 100644
index 2b7382da3570917c1983ad0c3fe02763d8565635..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_mobilenet_depth.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-../dnn_sources/src/profiling/mobilenet_depthwise_profiling.cc
-../dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_one_file.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_one_file.txt
deleted file mode 100644
index 32b18d4ca22672be6b44ecb674ea3ad00e18276d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/filenames_one_file.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-../dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc
-../dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16.txt
deleted file mode 100644
index 207eb1ed1f45ffde7dad0da4e125aa0ceaa5c5cd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-perf,20 1,1,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,21 1,2,1,0   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,22 1,2,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,23 1,3,1,0   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,24 1,3,1,1   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,25 1,3,1,2   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,26 2,1,1,0   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,27 2,1,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,28 3,1,1,0   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,29 3,1,1,1   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,30 3,1,1,2   1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,31 1,1,2,0     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,32 1,1,2,1     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,33 1,1,4,0     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,34 1,1,4,1     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,35 1,1,4,2     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,36 1,1,4,3     1.88    tensorHalfConvolution   tensorConvApproxHalf
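Each row of these knob-config tables uses the format the source-code autogenerator expects: approx,<id>, a comma-separated knob tuple, a column the generator ignores, the original tensor call, and its replacement. A minimal parse sketch over the first row above:

    line = "perf,20 1,1,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf"
    fields = line.split()
    approx, knob_id = fields[0].split(",")        # "perf", "20"
    params = fields[1].split(",")                 # knob tuple ["1", "1", "1", "1"]
    orig_func, new_func = fields[-2], fields[-1]  # call to rewrite and its replacement
    print(approx, knob_id, params, orig_func, new_func)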
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_knobs_31_36.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_knobs_31_36.txt
deleted file mode 100644
index fc76565110cf34ab57024dd852c1a51b23a8f45e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_knobs_31_36.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-samp,31 1,1,2,0     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,32 1,1,2,1     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,33 1,1,4,0     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,34 1,1,4,1     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,35 1,1,4,2     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,36 1,1,4,3     1.88    tensorHalfConvolution   tensorConvApproxHalf
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_old.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_old.txt
deleted file mode 100644
index 72c43e61288c532feed94f5768357b3113d5de49..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_old.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-perf,20 1,1,0   2.25    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,21 1,2,0   2.25    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,22 1,2,1   2.25    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,23 1,3,0   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,24 1,3,1   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,25 1,3,2   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,26 2,1,0   2.25    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,27 2,1,1   2.25    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,28 3,1,0   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,29 3,1,1   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-perf,30 3,1,2   1.88    tensorHalfConvolution   tensorConvPerfCudaHalf
-samp,31 2,0     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,32 2,1     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,33 4,0     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,34 4,1     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,35 4,2     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,36 4,3     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,37 1,1     1.88    tensorHalfConvolution   tensorConvInputHalf
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_samp.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_samp.txt
deleted file mode 100644
index 0f0593226f6fbeddda91046e7416fe108bfb6d90..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_samp.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-samp,31 2,0     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,32 2,1     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,33 4,0     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,34 4,1     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,35 4,2     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,36 4,3     1.88    tensorHalfConvolution   tensorConvInputHalf
-samp,37 1,1     1.88    tensorHalfConvolution   tensorConvInputHalf
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_vgg16.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_vgg16.txt
deleted file mode 100644
index a172a4e515ebfd24a51267da8bac2cb5f13ce6c0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp16_vgg16.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-perf,20 1,1,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,21 1,2,1,0   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,22 1,2,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,23 1,3,1,0   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,24 1,3,1,1   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,25 1,3,1,2   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,26 2,1,1,0   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,27 2,1,1,1   2.25    tensorHalfConvolution   tensorConvApproxHalf
-perf,28 3,1,1,0   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,29 3,1,1,1   1.88    tensorHalfConvolution   tensorConvApproxHalf
-perf,30 3,1,1,2   1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,32 1,1,2,1     1.88    tensorHalfConvolution   tensorConvApproxHalf
-samp,36 1,1,4,3     1.88    tensorHalfConvolution   tensorConvApproxHalf
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32.txt
deleted file mode 100644
index 78f3e361ee8a96c6520793b435815210e1fc7117..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-perf,20 1,1,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,21 1,2,1,0   2.25    tensorConvolution   tensorConvApprox
-perf,22 1,2,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,23 1,3,1,0   1.88    tensorConvolution   tensorConvApprox
-perf,24 1,3,1,1   1.88    tensorConvolution   tensorConvApprox
-perf,25 1,3,1,2   1.88    tensorConvolution   tensorConvApprox
-perf,26 2,1,1,0   2.25    tensorConvolution   tensorConvApprox
-perf,27 2,1,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,28 3,1,1,0   1.88    tensorConvolution   tensorConvApprox
-perf,29 3,1,1,1   1.88    tensorConvolution   tensorConvApprox
-perf,30 3,1,1,2   1.88    tensorConvolution   tensorConvApprox
-samp,31 1,1,2,0     1.88    tensorConvolution   tensorConvApprox
-samp,32 1,1,2,1     1.88    tensorConvolution   tensorConvApprox
-samp,33 1,1,4,0     1.88    tensorConvolution   tensorConvApprox
-samp,34 1,1,4,1     1.88    tensorConvolution   tensorConvApprox
-samp,35 1,1,4,2     1.88    tensorConvolution   tensorConvApprox
-samp,36 1,1,4,3     1.88    tensorConvolution   tensorConvApprox
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_baseline.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_baseline.txt
deleted file mode 100644
index df001ba497d0ed440dd34beead33d607651d3f35..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_baseline.txt
+++ /dev/null
@@ -1 +0,0 @@
-perf,20 1,1,1,1   2.25    tensorConvolution   tensorConvApprox
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_old.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_old.txt
deleted file mode 100644
index 36a7dbca05ef71b6046a91066acf5382f2a5c7a3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_old.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-perf,20 1,1,0   2.25    tensorConvolution   tensorConvPerfCuda
-perf,21 1,2,0   2.25    tensorConvolution   tensorConvPerfCuda
-perf,22 1,2,1   2.25    tensorConvolution   tensorConvPerfCuda
-perf,23 1,3,0   1.88    tensorConvolution   tensorConvPerfCuda
-perf,24 1,3,1   1.88    tensorConvolution   tensorConvPerfCuda
-perf,25 1,3,2   1.88    tensorConvolution   tensorConvPerfCuda
-perf,26 2,1,0   2.25    tensorConvolution   tensorConvPerfCuda
-perf,27 2,1,1   2.25    tensorConvolution   tensorConvPerfCuda
-perf,28 3,1,0   1.88    tensorConvolution   tensorConvPerfCuda
-perf,29 3,1,1   1.88    tensorConvolution   tensorConvPerfCuda
-perf,30 3,1,2   1.88    tensorConvolution   tensorConvPerfCuda
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_to_fp16.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_to_fp16.txt
deleted file mode 100644
index 913397cc4936bf11f3eefa15b5804700865e7b6b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_to_fp16.txt
+++ /dev/null
@@ -1 +0,0 @@
-fp16,12 0   1.5     tensorConvolution   tensorHalfConvolution
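This single fp16 knob selects the generator's FP16 path, which renames each camel-cased tensor<Op>(...) call to tensorHalf<Op>(...), leaving Softmax alone since tensorHalfSoftmax does not exist. A minimal sketch of that rename (the call and its arguments are hypothetical):

    def to_half(line):
        start = line.find("tensor")
        if start == -1 or "Softmax" in line:
            return line
        op = line[start + len("tensor") : line.find("(")]
        if not op or not op[0].isupper():  # only rewrite camel-cased tensor calls
            return line
        return line[:start] + "tensorHalf" + op + line[line.find("("):]

    print(to_half("void* c1 = tensorConvolution(input, w, 1, 1, 1, 1, mode, 0);"))
    # -> void* c1 = tensorHalfConvolution(input, w, 1, 1, 1, 1, mode, 0);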
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_vgg16.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_vgg16.txt
deleted file mode 100644
index 6fbab7d7b85255cd86748634faea0bf48ed75e42..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_fp32_vgg16.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-perf,20 1,1,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,21 1,2,1,0   2.25    tensorConvolution   tensorConvApprox
-perf,22 1,2,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,23 1,3,1,0   1.88    tensorConvolution   tensorConvApprox
-perf,24 1,3,1,1   1.88    tensorConvolution   tensorConvApprox
-perf,25 1,3,1,2   1.88    tensorConvolution   tensorConvApprox
-perf,26 2,1,1,0   2.25    tensorConvolution   tensorConvApprox
-perf,27 2,1,1,1   2.25    tensorConvolution   tensorConvApprox
-perf,28 3,1,1,0   1.88    tensorConvolution   tensorConvApprox
-perf,29 3,1,1,1   1.88    tensorConvolution   tensorConvApprox
-perf,30 3,1,1,2   1.88    tensorConvolution   tensorConvApprox
-samp,32 1,1,2,1     1.88    tensorConvolution   tensorConvApprox
-samp,36 1,1,4,3     1.88    tensorConvolution   tensorConvApprox
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_test.txt b/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_test.txt
deleted file mode 100644
index 68686b25de1c607e34d75044cd7ff19cf0c8890a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/knob_config_test.txt
+++ /dev/null
@@ -1 +0,0 @@
-fp16,12 0   1.5     tensorHalfConvolution   tensorHalfConvolution
diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/source_code_autogenerator.py b/llvm/projects/hpvm-tensor-rt/code_autogenerators/source_code_autogenerator.py
deleted file mode 100644
index 589cdd0f4fe05cb8e9844ba9ac3dccd73133f09f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/source_code_autogenerator.py
+++ /dev/null
@@ -1,463 +0,0 @@
-# Input: file of the following table format
-#   id    knob configurations (arbitrary # of columns)   orig_func_name     new_func_name 
-# Input: file containing list of filenames to generate modified sources for 
-# Generates:
-#   a new directory called <original_source_name>_autogenerated_knobs
-#   files named <original_source_name>_<id>.cc within their respective directories
-
-import glob
-import sys
-import os
-import re
-import shutil
-
-class Approx:
-    FP32 = 0
-    FP16 = 1
-    PERF = 2
-    SAMP = 3
-
-class KnobConfiguration:
-    '''
-    Stores the configurations as well as other useful information for each knob configuration
-    Stores: id (may factor out if ids are guaranteed to start at 0/1 and be consecutive)
-            original function name
-            modified function name
-            new function parameters (knobs)
-            new function call (modified function name(knobs)) 
-    '''
-    def __init__(self, raw_config):
-        '''
-        Args: raw_config = line of configuration file to parse
-        '''
-        line_as_lst = raw_config.strip().split()
-        # approx,<id> knob1,knob2,etc IGNORE old_fun_name new_fun_name
-
-        approx_id_lst = line_as_lst[0].split(',')
-        assert len(approx_id_lst) == 2
-
-        self.id = int(approx_id_lst[1])
-
-        if approx_id_lst[0] == "fp32":
-            self.approx = Approx.FP32
-            self.filename_ext = "fp32_converted"
-            return # special case 
-        elif approx_id_lst[0] == "fp16":
-            self.approx = Approx.FP16
-            self.filename_ext = "fp16_converted"
-            return # special case
-        elif approx_id_lst[0] == "perf":
-            self.approx = Approx.PERF
-        elif approx_id_lst[0] == "samp":
-            self.approx = Approx.SAMP
-
-        self.orig_func_name = line_as_lst[-2] # Second to last element
-        self.modified_func_name = line_as_lst[-1] # Last element  
-        self.params = line_as_lst[1].split(",") # First element = knob configuration 
-        self.filename_ext = approx_id_lst[0] + "_" + "_".join(self.params) # approx_method_knobs
-
-
-    # DEBUG
-    def __repr__(self):
-        if self.approx == Approx.FP32:
-            return "FP32"
-        elif self.approx == Approx.FP16:
-            return "FP16"
-
-        approx_type = None
-        if self.approx == Approx.PERF:
-            approx_type = "PERF"
-        elif self.approx == Approx.SAMP:
-            approx_type = "SAMP"
-        return "Approx: %s, ID: %d, Orig func nane: %s, Modified func nane: %s, Params: %s" \
-                % (approx_type, self.id, self.orig_func_name, self.modified_func_name, \
-                   ', '.join(self.params))
-
-
-def get_new_path(old_path, orig_source_code_dir):
-    '''
-    Returns a path that's compatible with the location of the generated source code
-
-    Args:
-        old_path: Original path of file that's being included
-        orig_source_code_dir: Path to original source code dir wrt the current dir
-    '''
-    if os.path.isabs(old_path): # Old path works
-        return old_path 
-    # Adding an extra .. because the path should be wrt the generated directory
-    return os.path.join("..", orig_source_code_dir, old_path)
-
-
-# "complete_line" = a valid line of code  
-def get_new_function_calls(complete_line, knob_config):
-    '''
-    Returns a copy of an inputted line of code such that all instances of old 
-    function calls are replaced with newFunctionCall(old params, knobs)
-
-    Note: The old calls aren't completely overridden, as we still need the old parameters but
-    insert new parameters as well
-
-    Args:
-        complete_line: A complete line of code to process
-        knob_config: KnobConfiguration object representing current configuration 
-    '''
-    orig_func_ind = complete_line.find(knob_config.orig_func_name)
-    new_line = []
-    line_start_ind = 0
-    last_ind = 0
-
-    while orig_func_ind != -1:
-        new_line.append(complete_line[line_start_ind : orig_func_ind])
-        line_start_ind = complete_line.find(")", orig_func_ind) + 1 
-        
-        old_func_call = complete_line[complete_line.find("(", orig_func_ind): line_start_ind]
-        if knob_config.modified_func_name == knob_config.orig_func_name:
-            # count the number of new parameters
-            num_repl_params = len(knob_config.params)
-            old_func_params = old_func_call.strip().split(',')
-            new_line.append("%s%s, %s)" % (knob_config.modified_func_name, ', '.join(old_func_params[:-num_repl_params]), ', '.join(knob_config.params)))
-
-        else:
-            new_line.append("%s%s, %s)" % (knob_config.modified_func_name, old_func_call[:-1], ', '.join(knob_config.params)))
-        orig_func_ind = complete_line.find(knob_config.orig_func_name, line_start_ind)
-    new_line.append(complete_line[line_start_ind : ])
-    return ''.join(new_line)
-
-
-def convert_local_paths(file_contents, orig_source_dir): 
-    '''
-    Converts all local paths wrt the original source file's directory to paths compatible
-    with the current source code directory
-
-    Args:
-        file_contents: String containing source code read from file
-        orig_source_dir: Path of original source code dir wrt the current directory 
-    '''
-    last_include_ind = file_contents.rfind("#include")
-    last_include_newline_ind = file_contents.find("\n", last_include_ind)
-    include_lines = file_contents[ : last_include_newline_ind].split("\n")
-    
-    new_file_contents = []
-    for line in include_lines:
-        if line.startswith("#"):
-            include_file = line.split()[1]
-            if include_file.startswith("\""):
-                new_include_path = get_new_path(include_file.replace("\"", ""), orig_source_dir.replace("\"", ""))
-                new_file_contents.append("#include \"%s\"\n" % new_include_path)
-            else:
-                new_file_contents.append(line)
-        else:
-            new_file_contents.append(line) # keep non-preprocessor lines instead of silently dropping them
-    new_file_contents.append(file_contents[last_include_newline_ind : ])
-    return '\n'.join(new_file_contents)
-
-
-def get_tensor_operation(line):
-    tensor_func_call = None
-    tensor_op_name = None
-    start_ind = None
-    end_ind = None
-
-    start_ind = line.find("tensor")
-    if start_ind != -1:
-        end_ind = line.find('(')
-        tensor_op_name = line[start_ind + len("tensor") : end_ind]
-        if tensor_op_name and tensor_op_name[0].isupper(): # crude way of checking whether we have a camel-cased method
-            tensor_func_call = line[start_ind : end_ind]
-    return tensor_func_call, tensor_op_name, start_ind, end_ind
-
-
-def generate_fp32_source(new_file, source_file, orig_source_dir):
-    # Copy the source code over 
-    new_file_contents = convert_local_paths(source_file.read(), orig_source_dir)
-    new_file.write(new_file_contents)
-
-
-def generate_fp16_source(knob_config, new_file, source_file, orig_source_dir):
-    file_contents = convert_local_paths(source_file.read(), orig_source_dir).split('\n')
-
-    new_file_contents = []
-    for line in file_contents:
-        tensor_func_call, tensor_op_name, start_ind, end_ind = get_tensor_operation(line)
-        # tensorHalfSoftmax doesn't exist 
-        if line.find("Softmax") == -1 and tensor_func_call:
-            new_file_contents.append(line[ : start_ind] + "tensorHalf" + tensor_op_name \
-                        + line[end_ind : ])
-        else:
-            new_file_contents.append(line)
-    new_file.write('\n'.join(new_file_contents))
-
-
-def generate_approx_source(knob_config, new_file, source_file, orig_source_dir):
-    new_file_contents = []
-
-    # Store complete line to handle cases where one line of code is split into two lines
-    complete_line = ""
-    for line in source_file:
-        # Replace the current path of the local include with a path that's compatible 
-        # with the location of the generated source code 
-        if line.startswith("#"):
-            include_file = line.split()[1]
-            if include_file.startswith("\""):
-                new_include_path = get_new_path(include_file.replace("\"", ""), orig_source_dir.replace("\"", ""))
-                new_file_contents.append("#include \"%s\"\n" % new_include_path)
-            else:
-                new_file_contents.append(line)
-            continue
-
-        # Handles case where 1 actual line of code is split into 2 lines 
-        elif line.find("}") != -1 or line.find("{") != -1:
-            complete_line += line
-            new_file_contents.append(complete_line)
-            complete_line = ""
-            continue
-
-        elif line.find(";") == -1: # Last char is always \n
-            complete_line += line
-            continue
-
-        complete_line += line
-        orig_func_ind = complete_line.find(knob_config.orig_func_name)
-        if orig_func_ind != -1:
-            new_file_contents.append(get_new_function_calls(complete_line, knob_config))
-        else:
-            new_file_contents.append(complete_line)
-        complete_line = ""
-    new_file.write(''.join(new_file_contents))
-
-
-def generate_source_code(table, dir_name, filename, source_name, profile_per_tensor):
-    '''
-    Generates source code for all configurations in the table for one original source 
-    Args:
-        table: List of KnobConfigurations
-        dir_name: Directory new sources should be placed in
-        filename: Filename of original source
-        source_name: Filename without the file extension (ex: foo/blah.cc --> blah)
-        profile_per_tensor: Whether to instrument the generated sources with per-tensor profiling calls
-    '''
-    source_file = open(filename, "r") 
-    orig_source_dir = os.path.dirname(filename)
-
-    for knob_config in table:
-        source_file.seek(0, 0)
-        new_filename = os.path.join(dir_name, "%s_%s.cc" % (source_name, knob_config.id))
-        #new_filename = os.path.join(dir_name, "%s_%s.cc" % (source_name, knob_config.filename_ext)) 
-        new_file = open(new_filename, "w")
-        if knob_config.approx == Approx.FP16:
-            generate_fp16_source(knob_config, new_file, source_file, orig_source_dir)
-        elif knob_config.approx == Approx.FP32:
-            generate_fp32_source(new_file, source_file, orig_source_dir)
-        elif knob_config.approx == Approx.PERF or knob_config.approx == Approx.SAMP:
-            generate_approx_source(knob_config, new_file, source_file, orig_source_dir)
-
-        new_file.close() # Need to flush
-
-        if profile_per_tensor:
-            add_profiling_calls_per_tensor(new_filename)
-        print("Generated source code as %s" % new_filename)
-    source_file.close()
-
-
-def generate_all_sources(table, orig_files_filename, profile_per_tensor):
-    '''
-    Generates directories and source code for all original sources for all knob configurations
-    Args:
-        table: List of KnobConfiguration objects
-        orig_files_filename: Filename of file containing all original source names to generate new
-               sources for
-        profile_per_tensor: Whether to instrument the generated sources with per-tensor profiling calls
-    '''
-    orig_files = open(orig_files_filename, "r")
-    for orig_filename in orig_files:
-        orig_filename = orig_filename.strip()
-
-        # Source name = original filename without the .cc 
-        last_slash_ind = orig_filename.rfind("/")
-        file_ext_ind = orig_filename.find(".cc")
-        if last_slash_ind == -1:
-            source_name = orig_filename[ : file_ext_ind]
-        else:
-            source_name = orig_filename[last_slash_ind + 1 : file_ext_ind]
-        print("Source name: %s" % source_name)
-       
-        # Start with a clean directory
-        dir_name = "%s_autogenerated_knobs" % source_name
-        print("Setting up directory: %s" % dir_name)
-        if os.path.isdir(dir_name):
-            print("Directory exists: clearing everything")
-            for old_file in glob.glob(os.path.join(dir_name, "*")):
-                os.remove(old_file)
-
-        else:
-            print("Generating directory: %s" % dir_name)
-            os.makedirs(dir_name)
-            
-        generate_source_code(table, dir_name, orig_filename, source_name, profile_per_tensor)
-        print("\n")
-    orig_files.close()
-
-
-# This is a lazy approach but it works so ...
-def add_profiling_calls_per_tensor(source_filename):
-    source_file = open(source_filename, "r")
-    orig_source = source_file.read().split('\n')
-    source_file.close()
-
-    modified_source = []
-
-    init_profiler_cmd = "%sProfiler profiler;"
-    start_profiler_call = "%sprofiler.start_profiler();"
-    resume_profiler_call = "%sprofiler.resume_profiler();"
-    pause_profiler_call = "%sprofiler.pause_profiler();"
-    stop_profiler_call = "%sprofiler.stop_profiler();"
-    time_energy_profiler_call = "%sauto time_energy_%d = profiler.get_time_energy();"
-    reset_profiler_call = "%sprofiler.reset();"
-
-    time_var_decl = "%sdouble %s_time = 0.0;"
-    time_energy_decl = "%sdouble %s_energy = 0.0;"
-
-    time_incr_cmd = "%s%s_time += time_energy_%d.first;"
-    energy_incr_cmd = "%s%s_energy += time_energy_%d.second;"
-
-    output_per_tensor = "%sstd::cout << \"Operation %s, time: \" << (%s_time) / total_runs <<\", energy: \" << (%s_energy) / total_runs << std::endl; "
-    total_output = "%sstd::cout << \"Total %s: \" << (%s) / total_runs << std::endl;"
-
-    time_energy_count = 0
-
-    tensor_operations = set() 
-    for line in orig_source:
-        line = line.strip()
-        tensor_func_call, _, _, _ = get_tensor_operation(line)
-        if tensor_func_call: tensor_operations.add(tensor_func_call)
-
-    inserted_end_profiler_call = False
-    has_seen_for_loop = False 
-    close_bracket_count = 0
-    line_ind = 0
-
-    while line_ind < len(orig_source):
-        line = orig_source[line_ind]
-        num_leading_spaces = len(line) - len(line.lstrip())
-        leading_spaces_str = ' ' * num_leading_spaces  # preserve the line's original indentation
-
-        if line.find("for") != -1:
-            has_seen_for_loop = True
-
-        if has_seen_for_loop and line.find("}") != -1:
-            close_bracket_count += 1
-
-        if line.find("#include") != -1:
-            modified_source.append(leading_spaces_str + line)
-            line_ind += 1
-            continue
-
-        if line.find("profiler") != -1 or line.find("total_time") != -1 or line.find("total_energy") != -1:
-            line_ind += 1
-            continue
-
-        if line.find("int total_runs") != -1:
-            # Now we insert the counters
-            for op_name in tensor_operations:
-                modified_source.append(time_var_decl % (leading_spaces_str, op_name))
-                modified_source.append(time_energy_decl % (leading_spaces_str, op_name))
-            modified_source.append(line)
-            modified_source.append(init_profiler_cmd % leading_spaces_str)
-            modified_source.append(start_profiler_call % leading_spaces_str)
-            line_ind += 1
-            continue
-
-        if close_bracket_count == 2 and not inserted_end_profiler_call: # NOTE this breaks if there are helper methods/scopes
-            modified_source.append(line)
-            total_time_str = []
-            total_energy_str = []
-
-            for op_name in tensor_operations:
-                modified_source.append(output_per_tensor % (leading_spaces_str, op_name, op_name, op_name))
-                total_time_str.append("%s_time" % op_name)
-                total_energy_str.append("%s_energy" % op_name)
-
-            modified_source.append(total_output % (leading_spaces_str, "time", ' + '.join(total_time_str)))
-            modified_source.append(total_output % (leading_spaces_str, "energy", ' + '.join(total_energy_str)))
-
-            modified_source.append(stop_profiler_call % leading_spaces_str)
-            line_ind += 1
-            inserted_end_profiler_call = True
-            continue
-
-        tensor_ind = line.find("tensor")
-        if tensor_ind == -1:
-            modified_source.append(line)
-            line_ind += 1
-            continue
-
-        char_after_tensor = line[tensor_ind + len("tensor")]
-        if char_after_tensor.isupper(): # crude way of checking whether we have a camel-cased method
-            tensor_op = line[tensor_ind : line.find('(')]
-
-            modified_source.append(resume_profiler_call % leading_spaces_str)
-            modified_source.append(line) 
-            
-            # Address one line that's split up into 2 lines for readability 
-            if line.find(")") == -1 and line_ind + 1 < len(orig_source) \
-                        and orig_source[line_ind + 1].find(")") != -1:
-                line_ind += 1
-                modified_source.append(orig_source[line_ind]) 
-
-            modified_source.append(pause_profiler_call % leading_spaces_str)
-            modified_source.append(time_energy_profiler_call % (leading_spaces_str, time_energy_count))
-            modified_source.append(time_incr_cmd % (leading_spaces_str, tensor_op, time_energy_count))
-            modified_source.append(energy_incr_cmd % (leading_spaces_str, tensor_op, time_energy_count))
-            modified_source.append(reset_profiler_call % leading_spaces_str)
-            modified_source.append("")
-
-            time_energy_count += 1
-        else:
-            modified_source.append(line)
-        line_ind += 1
-
-    source_file = open(source_filename, "w")
-    source_file.write('\n'.join(modified_source))
-    source_file.close()
-
-
-def parse_table(table_filename):
-    '''
-    Given the filename of a table, parses the table into a list of KnobConfigurations 
-    '''
-    # Can we assume that the ids always start at 1 --> if so, can index by knobs 
-    # else: need to use a dict
-    table = []
-    table_file = open(table_filename, "r")
-    for raw_config in table_file:
-        table.append(KnobConfiguration(raw_config))
-    table_file.close()  
-    return table
-   
-
-def delete_autogenerated_dirs():
-    for dir_name in os.listdir("."):
-        if dir_name.endswith("profiling_autogenerated_knobs"):
-            print("DELETING %s" % dir_name)
-            shutil.rmtree(dir_name)
-    print("DONE")
-
-if __name__ == "__main__":
-    num_args = len(sys.argv)
-
-    if num_args == 2 and sys.argv[1] == "clean":
-        delete_autogenerated_dirs()
-        exit(0)
-
-    if num_args != 3 and num_args != 4:
-        print("Usage: python source_code_autogenerator.py <table file> <original filenames file> [per_tensor]")
-        print("To delete autogen dirs: python source_code_autogenerator.py clean")
-
-        if num_args >= 2 and sys.argv[1] == "--usage":
-            print("Table file format: <id> <knob configurations separated by spaces> <orig func name> <new func name>")
-            print("Original filenames file: <original_filename><newline> etc")
-        else:
-            print("Run with --usage flag for more detailed information")
-        exit(1)
-
-    profile_per_tensor = num_args == 4 and sys.argv[3] == "per_tensor"
-
-    table = parse_table(sys.argv[1])
-    generate_all_sources(table, sys.argv[2], profile_per_tensor)
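To make the rewrite concrete: for a perf/samp knob, get_new_function_calls keeps the original argument list, appends the knob tuple, and swaps in the replacement name. A minimal sketch for knob samp,36 (params 1,1,4,3; tensorConvolution -> tensorConvApprox), with hypothetical arguments:

    line = "void* conv1 = tensorConvolution(input, conv1_w, 2, 2);"
    params = ["1", "1", "4", "3"]
    name_ind = line.find("tensorConvolution")
    open_ind = line.find("(", name_ind)
    close_ind = line.find(")", open_ind)
    rewritten = (line[:name_ind] + "tensorConvApprox"
                 + line[open_ind:close_ind]           # keep the original arguments
                 + ", " + ", ".join(params) + ")"     # splice in the knob tuple
                 + line[close_ind + 1:])
    print(rewritten)
    # -> void* conv1 = tensorConvApprox(input, conv1_w, 2, 2, 1, 1, 4, 3);

When per_tensor is requested, each such rewritten call is additionally bracketed with profiler.resume_profiler()/profiler.pause_profiler(), and the measured time and energy are accumulated per tensor operation, as add_profiling_calls_per_tensor above does.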
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/op_overheads.h b/llvm/projects/hpvm-tensor-rt/dnn_sources/include/op_overheads.h
deleted file mode 100644
index 4eaf88e6d613c51a5a75ef8ce73b55a3410f1dbd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/op_overheads.h
+++ /dev/null
@@ -1,148 +0,0 @@
-
-
-#ifndef OP_OVERHEADS_HEADER
-#define OP_OVERHEADS_HEADER
-
-
-#include <sstream>
-#include "../../tensor_runtime/include/tensor.h"
-#include "types.h"
-
-
-float scale_down_factor = 10000.0;
-float error_factor = 0.1;
-std::string result_str = "";
-
-
-// TODO: Every routine needs testing
-
-
-// private function
-static float getScaledComps(double total_comps, int error_scale){
-
-  total_comps = total_comps / scale_down_factor;
-  float comp_scale = 1.0 + (error_factor * error_scale);
-  total_comps = total_comps / comp_scale;
-
-  return total_comps;
-}
-
-
-static void addNormToResult(float comps){
-
-  std::ostringstream ss;
-  ss << std::fixed << comps;
-  
-  result_str.append( std::string(ss.str()) );
-  result_str.append("\t");
-}
-
-
-
-static void addCompsToResult(float comps){
-
-  std::ostringstream ss;
-  ss << std::fixed << comps;
-  
-  result_str.append( std::string(ss.str()) );
-  result_str.append("\n");
-}
-
-
-void add_conv_overheads(void* input_ptr, void* filter_ptr,
-			int strideA, int strideB, int error_scale){
-
-  // TODO: overhead model for convolution is not implemented yet
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-}
-
-
-void add_gemm_overheads(void* lhs_ptr, void* rhs_ptr, int error_scale){
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-    
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs last dimension must contain the neurons
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-  
-  // Flattening the dimensions after the batch dimension
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Dimension-note: Check if k is same across the two tensors
-  printf("m = %d, n = %d, k = %d \n", m, n, k);
-  
-  if(rhs_k != k){
-    printf("rhs=%d and lhs=%d columns/rows don't match", rhs_k, k);
-    abort();
-  }
-  
-  double total_comps = m * n * rhs_k * 1.0;
-  float scaled_comps = getScaledComps(total_comps, error_scale);
-  
-  printf("error_scale = %d, total_comps = %f, scaled_comps = %f \n",
-	 error_scale, total_comps, scaled_comps);
-
-  addCompsToResult(scaled_comps);
-  
-}
-
-
-void add_bias_overheads(void* input_ptr, int error_scale){
-
-  Tensor* input = (Tensor*) input_ptr;
-  
-  double total_comps = input->num_elems;
-  float scaled_comps = getScaledComps(total_comps, error_scale);
-
-  printf("error_scale = %d, total_comps = %f, scaled_comps = %f \n",
-	 error_scale, total_comps, scaled_comps);
-
-  addCompsToResult(scaled_comps);
-
-}
-
-
-void add_relu_overheads(void* input_ptr, int error_scale){
-  
-  Tensor* input = (Tensor*) input_ptr;
-  
-  double total_comps = input->num_elems;
-  float scaled_comps = getScaledComps(total_comps, error_scale);
-
-  printf("error_scale = %d, total_comps = %f, scaled_comps = %f \n",
-	 error_scale, total_comps, scaled_comps);				     
-
-  addCompsToResult(scaled_comps);
-
-}
-
-float add_pool_overheads(void* input_ptr, int kernel_size,
-			 int stride_size, int error_scale){
-
-  // TODO: overhead model for pooling is not implemented yet
-  return 0.0f;
-}
-
-
-void add_norms(void* norms_ptr){
-
-  Norm_t* norms = (Norm_t*) norms_ptr;
-
-  addNormToResult(norms->l1_norm);
-  addNormToResult(norms->l2_norm);
-  addNormToResult(norms->inf_norm);
- 
-}
-
-void dump_result(char* file_name){
-
-  FILE* fp = fopen(file_name, "w+");
-  fwrite(result_str.c_str(), 1, result_str.length(), fp);
-  fclose(fp); 
-}
-
-#endif
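The overhead model above reduces to scaled_comps = (total_comps / scale_down_factor) / (1 + error_factor * error_scale). A quick numeric check for a hypothetical 100x100x100 GEMM at error_scale = 2:

    scale_down_factor = 10000.0
    error_factor = 0.1

    def scaled_comps(total_comps, error_scale):
        return (total_comps / scale_down_factor) / (1.0 + error_factor * error_scale)

    total = 100 * 100 * 100          # m * n * k multiply-accumulates
    print(scaled_comps(total, 2))    # (1e6 / 1e4) / 1.2 = 83.33...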
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/types.h b/llvm/projects/hpvm-tensor-rt/dnn_sources/include/types.h
deleted file mode 100644
index 3e4f64610da64fb04b6270035da8557e940eb7e2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/types.h
+++ /dev/null
@@ -1,39 +0,0 @@
-
-#ifndef TYPES_HEADER
-#define TYPES_HEADER
-
-/*
-struct Dimension_t{
-  int num_dims;
-  size_t* dim_sizes;
-};
-
-
-struct Tensor_t{
-  int tensor_id; // used for indexing (in the tensor runtime)
-  int data_type; // {float_type, double_type, half_type, int_type}
-  int data_format; // {nchw, nhwc}
-  void* host_data;
-  size_t num_elems; // Total elements
-  size_t size_in_bytes; // Total size in bytes
-  struct Dimension_t dims;
-};
-
-
-
-enum Tensor_type_t{
-  float_type,
-  double_type,
-  half_type,
-  int_type
-};
-
-
-// NOTE: Currently only NCHW is supported due to limited cuDNN support
-enum Tensor_format_t{
-  nchw,
-  nhwc 
-};
-*/
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils.h b/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils.h
deleted file mode 100644
index a627f83e6b2aa9f38b09d82ee94ce35da1a6bafe..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils.h
+++ /dev/null
@@ -1,855 +0,0 @@
-
-// Header guards
-#ifndef UTILS_HEADER
-#define UTILS_HEADER
-
-
-#include <sstream>
-#include <vector>
-#include <bits/stdc++.h>
-#include <tensor_runtime.h>
-#include <tensor.h>
-#include <cmath>
-
-
-std::vector<float> run_accuracies;
-
-
-void printTensorInfo(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  if(tensor->gpu_data != NULL){
-    printf("Successful cudaMalloc \n");
-  }
-
-  printf("tensor dims = %d \n", tensor->dims.num_dims);
-  printf("dim1_size = %lu \n", tensor->dims.dim_sizes[0]);
-  printf("dim2_size = %lu \n", tensor->dims.dim_sizes[1]);
-  printf("num_elems = %lu \n", tensor->num_elems);
-}
-
-
-// FIXIT: Move this to debug.h and include in all files
-void dumpWeightsToFile(char* file_name, void* weights_ptr){
-
-  struct Tensor* weights = (Tensor*) weights_ptr;
-  // Move data back to host
-  hpvm_request_tensor(weights, 0);
-  
-  FILE* fp = fopen(file_name, "wb");
-  if(fp == NULL){
-    printf("File %s could not be created. Check if directory exists \n", file_name);
-    abort();
-  }
-
-  //printf("size_in_bytes = %lu \n", weights->size_in_bytes);
-  size_t bytes_written = fwrite(weights->host_data, 1, weights->size_in_bytes, fp);
-  //printf("bytes_written = %lu \n", bytes_written);
-  fclose(fp);
-}
-
-
-
-void fillTensorWithOnes(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-    
-  hpvm_request_tensor(tensor, 0);
-  
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      data_arr[i] = 1.0;    
-    }
-  }
-}
-
-
-void fillWithOnesAndTwos(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-  
-  hpvm_request_tensor(tensor, 0);
-  
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems/2; i++){
-      data_arr[i] = 1.0;    
-    }
-
-    for(unsigned int i = tensor->num_elems/2; i < tensor->num_elems; i++){
-      data_arr[i] = 2.0;    
-    }
- 
-  }
-}
-
-
-void fillTensorWithVal(void* tensor_ptr, float target_value){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-    
-  hpvm_request_tensor(tensor, 0);
-  
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      data_arr[i] = target_value;    
-    }
-  }
-}
-
-
-void fillTensorWithNegOnes(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-    
-  hpvm_request_tensor(tensor, 0);
-  
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      data_arr[i] = -1.0;    
-    }
-  }
-}
-
-
-void fillTensorVals(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      data_arr[i] = i + 1;    
-    }
-  }
-}
-
-
-void printTensorValues(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  hpvm_request_tensor(tensor, 0);
-  
-  // printing is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      printf("%f,", data_arr[i]);    
-    }
-  }
-
-  printf("\n");
-}
-
-
-void printTensorDims(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  printf("Num_elems = %lu \n", tensor->num_elems);
-  for (int i = 0; i < tensor->dims.num_dims; i++){
-    printf("dim[%d] = %lu \n", i, tensor->dims.dim_sizes[i]);
-  }
-}
-
-
-
-void compareTensors(void* tensor1_ptr, void* tensor2_ptr){
-
-  struct Tensor* tensor1 = (struct Tensor*) tensor1_ptr;
-  struct Tensor* tensor2 = (struct Tensor*) tensor2_ptr;
-
-  hpvm_request_tensor(tensor1, 0);
-  hpvm_request_tensor(tensor2, 0);
-
-  float* tensor_data1 = (float*) tensor1->host_data;
-  float* tensor_data2 = (float*) tensor2->host_data;
-  
-  for(unsigned int i = 0; i < tensor1->num_elems; i++){
-    if(tensor_data1[i] != tensor_data2[i]){
-      printf("Tensor data mismatch at index %d \n", i);
-      abort();
-    }
-  }
-}
-
-
-
-void compareValues(void* tensor_ptr, float* data, size_t num_elems){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-    
-  hpvm_request_tensor(tensor, 0);
-  
-  float* tensor_data = (float*) tensor->host_data;
-  for(unsigned int i = 0; i < num_elems; i++){
-    if(tensor_data[i] != data[i]){
-      printf("Tensor data mismatch");
-      abort();
-    }
-  }
-}
-
-
-void* readInputTensor(const char* file_name, int data_type, int dim1_size, int dim2_size,
-		      int dim3_size, int dim4_size){
-
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  uint8_t* file_data = (uint8_t*) malloc(sizeof(char) * num_elems);
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  int file_header_size = 16;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-
- 
-  fseek(file, file_header_size, SEEK_CUR); // Skipping the file header
-  size_t bytes_read = fread(file_data, 1, sizeof(uint8_t) * num_elems, file);
-
-  fclose(file);
-  
-  for (size_t i = 0; i < num_elems; ++i){
-    tensor_data[i] = (float) file_data[i] / 255.0f;
-  }
-
-  // NOTE: Using NCHW format
-  struct Tensor* input = (struct Tensor*) create4DTensor(data_type, nchw, dim1_size, dim2_size,
-					dim3_size, dim4_size);
-  
-  initTensorData(input, tensor_data, size_in_bytes);
-  //  compareValues(input, tensor_data, num_elems);
-  free(file_data);
-  free(tensor_data);
-
-  return input;
-}
-
-
-//*** FIXIT: Move this to CPU-only
-struct Tensor* readTrainedWeightsCPU(const char* file_name, int data_type,
-				     int dim1_size, int dim2_size,
-				     int dim3_size, int dim4_size){
-
-  // FIXIT: Don't assume floating point types
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  long int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  long int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  int file_header_size = 0;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-    
-  fseek(file, file_header_size, SEEK_CUR); // Skipping the file header
-  size_t bytes_read = fread(tensor_data, 1, size_in_bytes, file);
-
-  printf("size in bytes = %lu, bytes read = %lu \n", size_in_bytes, bytes_read);
-
-  fclose(file);
-  
-  
-  struct Tensor* weights = (struct Tensor*) create4DTensor(data_type, nchw, dim1_size, dim2_size,
-					                   dim3_size, dim4_size);
-  
-  initTensorData(weights, tensor_data, size_in_bytes);
-  //compareValues(weights, tensor_data, num_elems);
-  free(tensor_data);
-
-  return weights;
-}
-
-
-struct Tensor* readTrainedWeights(const char* file_name, int data_type,
-				  long int dim1_size, long int dim2_size,
-				  long int dim3_size, long int dim4_size){
-
-  // FIXIT: Don't assume floating point types
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  long int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  long int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  printf("size_in_bytes  = %lu \n", size_in_bytes);
-  
-  int file_header_size = 0;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-    
-  fseek(file, file_header_size, SEEK_CUR); // Skipping the file header
-  size_t bytes_read = fread(tensor_data, 1, size_in_bytes, file);
-
-  // printf("size in bytes = %lu, bytes read = %lu \n", size_in_bytes, bytes_read);
-
-  fclose(file);
-  
-  
-  struct Tensor* weights = (struct Tensor*) create4DTensor(data_type, nchw, dim1_size, dim2_size,
-					                   dim3_size, dim4_size);
-  
-  initTensorData(weights, tensor_data, size_in_bytes);
-  //compareValues(weights, tensor_data, num_elems);
-  free(tensor_data);
-
-  return weights;
-}
-
-
-
-
-struct Tensor* readInputBatch(const char* file_name, int data_type,
-			      int start, int end,
-			      int dim2_size, int dim3_size, int dim4_size){
-
-  int dim1_size = end - start;
-  // FIXIT: Don't assume floating point types
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  long int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  long int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  int file_header_size = type_size * start * dim2_size * dim3_size * dim4_size;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-    
-  fseek(file, file_header_size, SEEK_SET); // Skipping the file header
-  size_t bytes_read = fread(tensor_data, 1, size_in_bytes, file);
-
-
-  fclose(file);
-  
-  
-  struct Tensor* weights = (struct Tensor*) create4DTensor(data_type, nchw, dim1_size, dim2_size,
-					                   dim3_size, dim4_size);
-  
-  initTensorData(weights, tensor_data, size_in_bytes);
-  free(tensor_data);
-
-  return weights;
-}
-
-
-
-void* copyInputBatch(const char* file_name, 
-		    int start, int end,
-		    int dim2_size, int dim3_size, int dim4_size,
-		    void* inputTensor_ptr){
-
-  struct Tensor* inputTensor = (struct Tensor*) inputTensor_ptr;
-  
-  int dim1_size = end - start;
-  // FIXIT: Don't assume floating point types
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  long int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  long int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  int file_header_size = type_size * start * dim2_size * dim3_size * dim4_size;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-    
-  fseek(file, file_header_size, SEEK_SET); // Skipping the file header
-  size_t bytes_read = fread(tensor_data, 1, size_in_bytes, file);
-
-  fclose(file);
-  
-    
-  initTensorData(inputTensor, tensor_data, size_in_bytes);
-  free(tensor_data);
-
-  printf("******NOTE: tensor Dims = %d \n", inputTensor->dims.num_dims);
-  if(inputTensor->host_data == NULL || inputTensor->gpu_data == NULL)
-    printf("ERROR: NULL data pointers \n");
-
-
-  // Changing tensor placement to HOST
-  changeTensorPlacement(inputTensor, HOST);
-
-
-  return inputTensor;
-}
-
-
-
-uint8_t* readLabels(const char* labels_file, int num_labels){
-
-  uint8_t* labels = (uint8_t*) malloc(sizeof(uint8_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-
-  size_t bytes_read = fread(labels, 1, sizeof(uint8_t) * num_labels, file);
-
-  fclose(file);
-  
-  return labels;
-}
-
-
-
-uint32_t* readLabels3(const char* labels_file, int num_labels){
-
-  uint32_t* labels = (uint32_t*) malloc(sizeof(uint32_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-
-  size_t bytes_read = fread(labels, 1, sizeof(uint32_t) * num_labels, file);
-
-  fclose(file);
-  
-  return labels;
-}
-
-
-uint8_t* readLabelsBatch(const char* labels_file, int start, int end){
-
-  int num_labels = end - start;
-  int file_header_size = sizeof(uint8_t) * start;
-  
-  uint8_t* labels = (uint8_t*) malloc(sizeof(uint8_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-  
-  fseek(file, file_header_size, SEEK_SET); // Skipping the file header
-    
-  size_t bytes_read = fread(labels, 1, sizeof(uint8_t) * num_labels, file);
-
-
-  fclose(file);
-  
-  // printf("--labels bytes_read = %lu \n", bytes_read);
-  return labels;
-}
-
-
-uint32_t* readLabelsBatch3(const char* labels_file, int start, int end){
-
-  int num_labels = end - start;
-  int file_header_size = sizeof(uint32_t) * start;
-  
-  uint32_t* labels = (uint32_t*) malloc(sizeof(uint32_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-  
-  fseek(file, file_header_size, SEEK_SET); // Skipping the file header
-    
-  size_t bytes_read = fread(labels, 1, sizeof(uint32_t) * num_labels, file);
-
-
-  fclose(file);
-  
-  return labels;
-}
-
-
-
-void computeAccuracy(const char* labels_file, int num_labels, void* result_ptr){
-
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  uint8_t* labels = readLabels(labels_file, num_labels);
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-  
-  for(int i = 0; i < batch_dim; i++){
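-    // Argmax over the class scores (hardcoded for 10 classes)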
-    int chosen = 0;
-    for (int id = 1; id < 10; ++id){
-      if (data[i * channels + chosen] < data[i * channels + id]) chosen = id;
-    }
-    
-    //printf("chosen = %d, label = %d \n", chosen, labels[i]);
-    if(chosen != labels[i])
-      num_errors++;
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-  
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp);
-  }
-  
-}
-
-
-
-
-// NOTE: the batch_size argument is unused, and num_classes is overwritten from the result tensor's dimensions
-float computeAccuracy2(uint8_t* labels, int batch_size,
-		       void* result_ptr, size_t num_classes = 10){
-
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  num_classes = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, channels = %lu \n", batch_dim, num_classes);
-  
-  for(unsigned int i = 0; i < batch_dim; i++){ 
-      
-    int chosen = 0;
-    for (int id = 1; id < num_classes; ++id){
-      if (data[i * num_classes + chosen] < data[i * num_classes + id]) chosen = id;
-    }
-    
-    if(chosen != labels[i])
-      num_errors++;
-
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp); // Close only on success; fclose(NULL) is undefined behavior
-  }
-
-  return accuracy;    
-}
-
-
-
-float computeAccuracy3(uint32_t* labels, void* result_ptr){
-  
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t num_classes = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, num_classes = %lu \n", batch_dim, num_classes);
-  
-  for(int i = 0; i < batch_dim; i++){
-  
-    int chosen = 0;
-    for (int id = 1; id < num_classes; ++id){
-      if (data[i * num_classes + chosen] < data[i * num_classes + id]) chosen = id;
-    }
-    
-    if(chosen != labels[i])
-      num_errors++;
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp);
-  }
-
-  return accuracy;    
-}
-
-
-
-struct ClassProb{
-  float prob;
-  int index;
-};
-
-
-bool descendFloatComp(ClassProb obj1, ClassProb obj2){
-  return obj1.prob > obj2.prob;
-}
-
-
-float computeTop5Accuracy(uint8_t* labels, int num_labels,
-			  void* result_ptr, unsigned num_classes = 10){
-  
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, channels = %lu \n", batch_dim, channels);
-  
-  for(int i = 0; i < num_labels; i++){
-
-    std::vector<ClassProb> elem_probs;
-    for (int id = 0; id < num_classes; ++id){
-      ClassProb cProb;
-      cProb.prob = data[i * channels + id];
-      cProb.index = id;
-      elem_probs.push_back(cProb);   
-    }
-
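-    // Sort class probabilities in descending order; the first five entries are the top-5 predictions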
-    std::sort(elem_probs.begin(), elem_probs.end(), descendFloatComp);
-    // Check if any of top-5 predictions matches
-    bool matched = false;
-    for(int j = 0; j < 5; j++){
-      ClassProb cProb = elem_probs[j];
-      if(cProb.index == labels[i])
-        matched = true;
-    }
-
-    if(!matched)
-      num_errors +=1; 
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp);
-  }
-
-  return accuracy;    
-}
-
-
-
-
-void dumpFinalAccuracy(float accuracy){
-
-  printf("\n\n **** Final Accuracy = %f \n", accuracy);
-  
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp);
-  }
-
-  run_accuracies.push_back(accuracy);
-}
-
-
-
-void dumpAvgPSNR(float avg_psnr){
-
-  FILE* fp = fopen("avg_psnr", "w+");
-  if(fp != NULL){
-    std::ostringstream ss;
-    ss << std::fixed << avg_psnr;
-    std::string print_str = ss.str(); 
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-  }
-
-  fclose(fp);
-}
-
-
-void dumpPSNRStd(float psnr_std){
-
-  FILE* fp = fopen("psnr_std.txt", "w+");
-  if(fp != NULL){
-    std::ostringstream ss;
-    ss << std::fixed << psnr_std;
-    std::string print_str = ss.str(); 
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-  }
-
-  fclose(fp);
-}
-
-
-
-
-
-void dumpExecutionAccuracies(){
-
-  FILE* fp = fopen("run_accuracies.txt", "w+");
-  if(fp != NULL){  
-    for (int i = 0; i < run_accuracies.size(); i++){
-      float accuracy = run_accuracies[i];
-      std::ostringstream ss;
-      ss << std::fixed << accuracy;
-      std::string print_str = ss.str();
-      fwrite(print_str.c_str(), 1, print_str.length(), fp);
-      fwrite("\n", 1, 1, fp);
-    }
-
-  }
-
-  fclose(fp);
-}
-
-
-float readPSNRFromFile(const char* file_name){
-
-  float psnr;
-  FILE* pFile = fopen(file_name, "r");
-  if(pFile == NULL){
-    printf("ERROR: PSNR file %s not found! \n", file_name);
-    abort();
-  }
-
-  fscanf(pFile, "%f", &psnr);
-  printf("**** PSNR read = %f \n\n", psnr);
-  fclose(pFile);
-  return psnr;
-}
-
-
-float computePSNRViolation(void* gold_ptr, void* approx_ptr, float PSNR_threshold){
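-  // NOTE: the PSNR_threshold argument is ignored; the threshold is re-read from psnr.txt below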
-
-  
-  PSNR_threshold = readPSNRFromFile("psnr.txt");
-  std::vector<float> psnr_list;
-  
-  struct Tensor* gold_tensor = (struct Tensor*) gold_ptr;
-  struct Tensor* approx_tensor = (struct Tensor*) approx_ptr;
-
-  size_t* dim_sizes = gold_tensor->dims.dim_sizes;
-  size_t batch_dim = dim_sizes[0];
-  size_t image_size = dim_sizes[1] * dim_sizes[2] * dim_sizes[3];
-  
-  printf("batch_dim = %lu, image_size = %lu \n", batch_dim, image_size);
-	 
-  float* gold_data = (float*) gold_tensor->host_data;
-  float* approx_data = (float*) approx_tensor->host_data;
-
-  FILE* fp = fopen("img_psnr.txt", "w+");
-
-  float sum_psnr = 0.0;
-  int num_errors = 0;  
-  for(size_t i = 0; i < batch_dim; i++){
-    float mse_sum = 0.0;
-    float max_val = -999999;     
-    size_t offset = i * image_size;
-    
-    for(size_t j = 0; j < image_size; j++){
-      float diff = gold_data[offset + j] - approx_data[offset + j];
-      float diff_square = diff * diff;
-      mse_sum += diff_square;
-
-      if(max_val < gold_data[offset + j]){
-	max_val = gold_data[offset + j];
-      }   
-    }
-
-    mse_sum = mse_sum / image_size;
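-    // PSNR (dB) = 20 * log10(MAX_I / sqrt(MSE)); 255 assumes an 8-bit pixel range (max_val above is unused)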
-    float psnr = 20 * log10(255 / sqrt(mse_sum));
-
-    sum_psnr += psnr;
-    if (psnr < PSNR_threshold)
-      num_errors += 1;    
-
-    printf("PSNR value = %f \n", psnr);
-    psnr_list.push_back(psnr);
-
-    std::ostringstream ss;
-    ss << std::fixed << psnr;
-    std::string print_str = ss.str();
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fwrite("\n", 1, 1, fp);
-  }
-
-  float violation_rate = (num_errors * 1.0) / batch_dim * 100.0;
-  printf("*** violation_rate= %f \n\n", violation_rate);
-
-  float avg_psnr = sum_psnr / batch_dim;
-  printf("*** avg_psnr =  %f \n\n", avg_psnr);
-  dumpAvgPSNR(avg_psnr);
- 
-  float success_rate = 100.0 - violation_rate;
-  dumpFinalAccuracy(success_rate);
-
-  fclose(fp);
-
-
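-  // Compute the standard deviation of the per-image PSNR values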
-  float var = 0.0;
-  for(size_t i = 0; i < batch_dim; i++){
-    var = var + (psnr_list[i] - avg_psnr) * (psnr_list[i] - avg_psnr); 
-  }
-
-  var /= batch_dim;
-  float std = sqrt(var);
-
-  dumpPSNRStd(std);
-  
-  return violation_rate;  
-}
-
-
-void dumpOutput(void* output_ptr, const char* file_name){
-
-  struct Tensor* out_tensor = (struct Tensor*) output_ptr;  
-  size_t size_in_bytes = out_tensor->size_in_bytes;
-  printf ("** Output size = %lu \n", size_in_bytes);
-  
-  float* host_data = (float*) out_tensor->host_data; 
-  FILE* fd = fopen(file_name, "w+");
-  if(fd == NULL){
-    printf("ERROR: Could not open %s for writing. Aborting... \n", file_name);
-    abort();
-  }
-  fwrite(host_data, 1, size_in_bytes, fd);
-  fclose(fd);
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils_cpu.h b/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils_cpu.h
deleted file mode 100644
index 45ef7211a4c04f15d1763fde729b4ca550851008..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/include/utils_cpu.h
+++ /dev/null
@@ -1,467 +0,0 @@
-
-// Header guards
-#ifndef UTILS_HEADER
-#define UTILS_HEADER
-
-
-#include <sstream>
-#include <vector>
-#include <bits/stdc++.h>
-#include "../../tensor_runtime/include/tensor_cpu.h"
-#include "../../tensor_runtime/include/tensor_cpu_runtime.h"
-//#include "types.h"
-#include <cmath>
-#include <stdint.h>
-
-
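-// Per-run accuracies appended by dumpFinalAccuracy()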
-std::vector<float> run_accuracies;
-
-
-void printTensorInfo(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  if(tensor->gpu_data != NULL){
-    printf("Successful cudaMalloc \n");
-  }
-
-  printf("tensor dims = %d \n", tensor->dims.num_dims);
-  printf("dim1_size = %lu \n", tensor->dims.dim_sizes[0]);
-  printf("dim2_size = %lu \n", tensor->dims.dim_sizes[1]);
-  printf("num_elems = %lu \n", tensor->num_elems);
-}
-
-
-
-void printTensorDims(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  printf("Num_elems = %lu \n", tensor->num_elems);
-  for (int i = 0; i < tensor->dims.num_dims; i++){
-    printf("dim[%d] = %lu \n", i, tensor->dims.dim_sizes[i]);
-  }
-}
-
-
-
-void compareTensors(void* tensor1_ptr, void* tensor2_ptr){
-
-  struct Tensor* tensor1 = (struct Tensor*) tensor1_ptr;
-  struct Tensor* tensor2 = (struct Tensor*) tensor2_ptr;
-
-  //hpvm_request_tensor(tensor1, 0);
-  //hpvm_request_tensor(tensor2, 0);
-
-  float* tensor_data1 = (float*) tensor1->host_data;
-  float* tensor_data2 = (float*) tensor2->host_data;
-  
-  for(unsigned int i = 0; i < tensor1->num_elems; i++){
-    if(tensor_data1[i] != tensor_data2[i]){
-      printf("Tensor data mismatch at index %d \n", i);
-      abort();
-    }
-  }
-}
-
-
-
-//*** FIXIT: Move this to CPU-only
-struct Tensor* readTrainedWeightsCPU(const char* file_name, int data_type,
-				     int dim1_size, int dim2_size,
-				     int dim3_size, int dim4_size){
-
-  // FIXIT: Don't assume floating point types
-  int type_size = 4; // NOTE: Assuming floating point tensors
-  long int num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-  long int size_in_bytes = type_size * dim1_size * dim2_size * dim3_size * dim4_size;
-  float* tensor_data = (float*) malloc(sizeof(float) * num_elems);
-  int file_header_size = 0;
-  
-  FILE* file = fopen(file_name, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting... \n", file_name);
-    abort();
-  }
-    
-  fseek(file, file_header_size, SEEK_CUR); // Skipping the file header
-  size_t bytes_read = fread(tensor_data, 1, size_in_bytes, file);
-
-  printf("size in bytes = %lu, bytes read = %lu \n", size_in_bytes, bytes_read);
-
-  fclose(file);
-
-  
-  struct Tensor* weights = (struct Tensor*) create4DTensor(data_type, nchw, dim1_size, dim2_size,
-							   dim3_size, dim4_size);
-  
-  initTensorData(weights, tensor_data, size_in_bytes);
-  //compareValues(weights, tensor_data, num_elems);
-  free(tensor_data);
-
-  return weights;
-}
-
-
-struct Tensor* readTrainedWeights(const char* file_name, int data_type,
-				     int dim1_size, int dim2_size,
-				     int dim3_size, int dim4_size){
-
-  return readTrainedWeightsCPU(file_name, data_type, dim1_size, dim2_size, dim3_size, dim4_size);
-}
-
-
-
-uint8_t* readLabels(const char* labels_file, int num_labels){
-
-  uint8_t* labels = (uint8_t*) malloc(sizeof(uint8_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-
-  size_t bytes_read = fread(labels, 1, sizeof(uint8_t) * num_labels, file);
-
-  fclose(file);
-  
-  return labels;
-}
-
-
-uint8_t* readLabelsBatch(const char* labels_file, int start, int end){
-
-  int num_labels = end - start;
-  int file_header_size = sizeof(uint8_t) * start;
-  
-  uint8_t* labels = (uint8_t*) malloc(sizeof(uint8_t) * num_labels);
-  FILE* file = fopen(labels_file, "rb");
-  if(file == NULL){
-    printf("Data file %s is not found. Aborting...\n", labels_file);
-    abort();
-  }
-  
-  fseek(file, file_header_size, SEEK_SET); // Skipping the file header
-    
-  size_t bytes_read = fread(labels, 1, sizeof(uint8_t) * num_labels, file);
-
-
-  fclose(file);
-  
-  return labels;
-}
-
-
-
-void computeAccuracy(const char* labels_file, int num_labels, void* result_ptr){
-
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  uint8_t* labels = readLabels(labels_file, num_labels);
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-  
-  for(int i = 0; i < batch_dim; i++){
-    int chosen = 0;
-    for (int id = 1; id < 10; ++id){
-      if (data[i * channels + chosen] < data[i * channels + id]) chosen = id;
-    }
-    
-    if(chosen != labels[i])
-      num_errors++;
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-    fprintf(fp, "%f", accuracy);
-    fclose(fp);
-  }
-  
-}
-
-
-
-float computeAccuracy2(uint8_t* labels, int num_labels, void* result_ptr, unsigned num_classes = 10){
-
-  unsigned num_zeros = 0;
-  
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, channels = %lu \n", batch_dim, channels);
-  
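-  // NOTE: iterates over num_labels entries but normalizes by batch_dim; callers pass num_labels == batch_dim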
-  for(int i = 0; i < num_labels; i++){  
-    int chosen = 0;
-    for (int id = 1; id < num_classes; ++id){
-      if (data[i * channels + chosen] < data[i * channels + id]) chosen = id;
-    }
-    
-    if(labels[i] == 0)
-      num_zeros++;
-      
-    if(chosen != labels[i])
-      num_errors++;
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-    fprintf(fp, "%f", accuracy);
-    fclose(fp);
-  }
-
-  return accuracy;    
-}
-
-
-struct ClassProb{
-  float prob;
-  int index;
-};
-
-
-bool descendFloatComp(ClassProb obj1, ClassProb obj2){
-  return obj1.prob > obj2.prob;
-}
-
-
-float computeTop5Accuracy(uint8_t* labels, int num_labels, void* result_ptr, unsigned num_classes = 10){
-  
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, channels = %lu \n", batch_dim, channels);
-  
-  for(int i = 0; i < num_labels; i++){
-
-    std::vector<ClassProb> elem_probs;
-    for (int id = 0; id < num_classes; ++id){
-      ClassProb cProb;
-      cProb.prob = data[i * channels + id];
-      cProb.index = id;
-      elem_probs.push_back(cProb);   
-    }
-
-    std::sort(elem_probs.begin(), elem_probs.end(), descendFloatComp);
-    // Check if any of top-5 predictions matches
-    bool matched = false;
-    for(int j = 0; j < 5; j++){
-      ClassProb cProb = elem_probs[j];
-      if(cProb.index == labels[i])
-        matched = true;
-    }
-
-    if(!matched)
-      num_errors +=1; 
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-    fprintf(fp, "%f", accuracy);
-    fclose(fp);
-  }
-
-  return accuracy;    
-}
-
-
-
-
-void dumpFinalAccuracy(float accuracy){
-
-  printf("\n\n **** Final Accuracy = %f \n", accuracy);
-  
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-    fprintf(fp, "%f", accuracy);
-    fclose(fp);
-  }
-
-  run_accuracies.push_back(accuracy);
-}
-
-
-
-/*void dumpAvgPSNR(float avg_psnr){
-
-  FILE* fp = fopen("avg_psnr", "w+");
-  if(fp != NULL){
-    std::ostringstream ss;
-    ss << std::fixed << avg_psnr;
-    std::string print_str = ss.str(); 
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-  }
-
-  fclose(fp);
-}
-*/
-
-/*void dumpPSNRStd(float psnr_std){
-
-  FILE* fp = fopen("psnr_std.txt", "w+");
-  if(fp != NULL){
-    std::ostringstream ss;
-    ss << std::fixed << psnr_std;
-    std::string print_str = ss.str(); 
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-  }
-
-  fclose(fp);
-}*/
-
-
-
-
-/*
-void dumpExecutionAccuracies(){
-
-  FILE* fp = fopen("run_accuracies.txt", "w+");
-  if(fp != NULL){  
-    for (int i = 0; i < run_accuracies.size(); i++){
-      float accuracy = run_accuracies[i];
-      std::ostringstream ss;
-      ss << std::fixed << accuracy;
-      std::string print_str = ss.str();
-      fwrite(print_str.c_str(), 1, print_str.length(), fp);
-      fwrite("\n", 1, 1, fp);
-    }
-
-  }
-
-  fclose(fp);
-}
-*/
-
-float readPSNRFromFile(const char* file_name){
-
-  float psnr;
-  FILE* pFile = fopen(file_name, "r");
-  if(pFile == NULL){
-    printf("ERROR: PSNR file %s not found! \n", file_name);
-    abort();
-  }
-
-  fscanf(pFile, "%f", &psnr);
-  printf("**** PSNR read = %f \n\n", psnr);
-  fclose(pFile);
-  return psnr;
-}
-
-
-/*float computePSNRViolation(void* gold_ptr, void* approx_ptr, float PSNR_threshold){
-
-  
-  PSNR_threshold = readPSNRFromFile("psnr.txt");
-  std::vector<float> psnr_list;
-  
-  struct Tensor* gold_tensor = (struct Tensor*) gold_ptr;
-  struct Tensor* approx_tensor = (struct Tensor*) approx_ptr;
-
-  size_t* dim_sizes = gold_tensor->dims.dim_sizes;
-  size_t batch_dim = dim_sizes[0];
-  size_t image_size = dim_sizes[1] * dim_sizes[2] * dim_sizes[3];
-  
-  printf("batch_dim = %lu, image_size = %lu \n", batch_dim, image_size);
-	 
-  float* gold_data = (float*) gold_tensor->host_data;
-  float* approx_data = (float*) approx_tensor->host_data;
-
-  FILE* fp = fopen("img_psnr.txt", "w+");
-
-  float sum_psnr = 0.0;
-  int num_errors = 0;  
-  for(size_t i = 0; i < batch_dim; i++){
-    float mse_sum = 0.0;
-    float max_val = -999999;     
-    size_t offset = i * image_size;
-    
-    for(size_t j = 0; j < image_size; j++){
-      float diff = gold_data[offset + j] - approx_data[offset + j];
-      float diff_square = diff * diff;
-      mse_sum += diff_square;
-
-      if(max_val < gold_data[offset + j]){
-	max_val = gold_data[offset + j];
-      }   
-    }
-
-    mse_sum = mse_sum / image_size;
-    float psnr = 20 * log10(255 / sqrt(mse_sum));
-
-    sum_psnr += psnr;
-    if (psnr < PSNR_threshold)
-      num_errors += 1;    
-
-    printf("PSNR value = %f \n", psnr);
-    psnr_list.push_back(psnr);
-
-    std::ostringstream ss;
-    ss << std::fixed << psnr;
-    std::string print_str = ss.str();
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fwrite("\n", 1, 1, fp);
-  }
-
-  float violation_rate = (num_errors * 1.0) / batch_dim * 100.0;
-  printf("*** violation_rate= %f \n\n", violation_rate);
-
-  float avg_psnr = sum_psnr / batch_dim;
-  printf("*** avg_psnr =  %f \n\n", avg_psnr);
-  dumpAvgPSNR(avg_psnr);
- 
-  float success_rate = 100.0 - violation_rate;
-  dumpFinalAccuracy(success_rate);
-
-  fclose(fp);
-
-
-  float var = 0.0;
-  for(size_t i = 0; i < batch_dim; i++){
-    var = var + (psnr_list[i] - avg_psnr) * (psnr_list[i] - avg_psnr); 
-  }
-
-  var /= batch_dim;
-  float std = sqrt(var);
-
-  //dumpPSNRStd(std);
-  
-  return violation_rate;  
-}*/
-
-
-void dumpOutput(void* output_ptr, const char* file_name){
-
-  struct Tensor* out_tensor = (struct Tensor*) output_ptr;  
-  size_t size_in_bytes = out_tensor->size_in_bytes;
-  printf ("** Output size = %lu \n", size_in_bytes);
-  
-  float* host_data = (float*) out_tensor->host_data; 
-  FILE* fd = fopen(file_name, "w+");
-  if(fd == NULL){
-    printf("ERROR: Could not open %s for writing. Aborting... \n", file_name);
-    abort();
-  }
-  fwrite(host_data, 1, size_in_bytes, fd);
-  fclose(fd);
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/#test_fc_network2.cc# b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/#test_fc_network2.cc#
deleted file mode 100644
index 88ceb9105059aeb2eca8f18029af674ea8b14a10..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/#test_fc_network2.cc#
+++ /dev/null
@@ -1,92 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-void test4LayerFC(){
-
-  printf("********* 4-layer FC Network ********* \n");
-  // FIXIT: Process inputs in multiple batches - currently a single batch of 10000 images
-
-  int test_batch_size = 10000;
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				float_type,
-				test_batch_size, 1, 28, 28);    
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network1/fc1.bin",
-					 float_type, 1, 1, 784, 1000);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network1/fc1_bias.bin",
-				      float_type, 1, 1000, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network1/fc2.bin",
-						  float_type, 1, 1, 1000, 500);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network1/fc2_bias.bin",
-					       float_type, 1, 500, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/FC_network1/fc3.bin",
-						  float_type, 1, 1, 500, 200);  
-  void* fc3_bias = readTrainedWeights("../model_params/FC_network1/fc3_bias.bin",
-					       float_type, 1, 200, 1, 1);  
-  void* fc4_weights = readTrainedWeights("../model_params/FC_network1/fc4.bin",
-						  float_type, 1, 1, 200, 10);  
-  void* fc4_bias = readTrainedWeights("../model_params/FC_network1/fc4_bias.bin",
-					       float_type, 1, 10, 1, 1);  
-
-  //dumpWeightsToFile("tensors_out/input_fc.out", input);
-  //dumpWeightsToFile("tensors_out/fc1_w_fc.out", fc1_weights);  
-
-  // Start Profiling execution times of Tensor operations
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  //dumpWeightsToFile("tensors_out/fc1out_fc.out", fc1out);  
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  printTensorDims(fc1_bias_out);
-  //dumpWeightsToFile("tensors_out/fc_fc1.out", fc1_bias_out);
- 
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_bias_out, fc2_weights);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  printTensorDims(fc2_bias_out);
-
-  // Layer-3
-  void* fc3out = tensorGemmGPU(fc2_bias_out, fc3_weights);  
-  printTensorDims(fc3out);
-  
-  void* fc3_bias_out = tensorAdd(fc3out, fc3_bias);
-  printTensorDims(fc3_bias_out);
-
-  // Layer-4
-  void* fc4out = tensorGemmGPU(fc3_bias_out, fc4_weights);  
-  printTensorDims(fc4out);
-  
-  void* fc4_bias_out = tensorAdd(fc4out, fc4_bias);
-  printTensorDims(fc4_bias_out);
- 
-  void* result = tensorSoftmax(fc4_bias_out);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-} 
-
-
-
-
-int main(){
-
-  initializeRuntime(0);
-
-  test4LayerFC();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10.cc
deleted file mode 100644
index fe71eb14caedba8d5813bbb0fa7feadcf0c72950..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2500;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* conv1out = tensorConvolution(input, conv1_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv1out, conv1_bias); 
-    void* conv1_tanh = tensorTanh(conv1out);
-    
-    // 2nd Layer
-    void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); 
-    void* conv2_tanh = tensorTanh(conv2out);
-    void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-     
-    // 3rd Layer
-    void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv3out, conv3_bias); 
-    void* conv3_tanh = tensorTanh(conv3out);
-
-    // 4th Layer
-    void* conv4out = tensorConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv4out, conv4_bias); 
-    void* conv4_tanh = tensorTanh(conv4out);
-    void* pool4out = tensorPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-    // 5th Layer
-    void* conv5out = tensorConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv5out, conv5_bias); 
-    void* conv5_tanh = tensorTanh(conv5out);
-
-    // 6th Layer
-    void* conv6out = tensorConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv6out, conv6_bias); 
-  
-    void* conv6_tanh = tensorTanh(conv6out);
-    void* pool6out = tensorPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-    // final FC Layer
-    void* gemm1out = tensorGemmGPU(pool6out, fc1_weights);  
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-    void* result = tensorSoftmax(gemm1biasout);
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, result); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10_tuner.cc
deleted file mode 100644
index 08e5817fc4aa037bc59cceafc1baba382696e329..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet2_cifar10_tuner.cc
+++ /dev/null
@@ -1,162 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-
-int total_runs = 1;
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-
-  startMemTracking();
-
-  int test_input_size = 500;
-  int batch_size = 500;
-  int offset = 5000;
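-  // Skip the first 5000 test images; this tuner run uses the next 500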
-  
-  int batch_count = test_input_size / batch_size;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  for(int j = 0; j < total_runs; j++){
-    
-    float final_accuracy = 0.0;
-    for(int i = 0; i < batch_count; i++){
-
-      int start = i * batch_size + offset;
-      int end = (i + 1) * batch_size + offset;
-      void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-      void* conv1out = tensorConvolution(input, conv1_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv1out, conv1_bias); 
-      void* conv1_tanh = tensorTanh(conv1out);
-    
-      // 2nd Layer
-      void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv2out, conv2_bias); 
-      void* conv2_tanh = tensorTanh(conv2out);
-      void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-     
-      // 3rd Layer
-      void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv3out, conv3_bias); 
-      void* conv3_tanh = tensorTanh(conv3out);
-
-      // 4th Layer
-      void* conv4out = tensorConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv4out, conv4_bias); 
-      void* conv4_tanh = tensorTanh(conv4out);
-      void* pool4out = tensorPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-      // 5th Layer
-      void* conv5out = tensorConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv5out, conv5_bias); 
-      void* conv5_tanh = tensorTanh(conv5out);
-
-      // 6th Layer
-      void* conv6out = tensorConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv6out, conv6_bias); 
-  
-      void* conv6_tanh = tensorTanh(conv6out);
-      void* pool6out = tensorPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-      // final FC Layer
-      void* gemm1out = tensorGemmGPU(pool6out, fc1_weights);  
-      void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-      void* result = tensorSoftmax(gemm1biasout);
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, result); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-
-    stopProfiling();
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy);
-  }
-
-
-  dumpExecutionAccuracies();
-     
-
-}
-
-
-
-int main(int argc, char* argv[]){
-
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10.cc
deleted file mode 100644
index 3e5cec7d0760252ebff1b31293a51bdf570415f4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10.cc
+++ /dev/null
@@ -1,196 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  int total_runs = 100;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* CIFAR-10 DNN ********** \n");
-  // FIXIT: Process inputs in multiple batches - currently a single batch of 5000 images
-
-  int test_batch_size = 5000;
-
-  //uint8_t* labels = readLabels("../model_params/cifar_keras/labels.bin", test_batch_size);
-  uint8_t* labels = readLabels("../model_params/alexnet_cifar10/test_labels.bin", test_batch_size);
-    
-  void* input = readTrainedWeights("../model_params/alexnet_cifar10/norm_cifar_input.bin",
-			  	   float_type,
-				   test_batch_size, 3, 32, 32);
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv1.bin",
-					  float_type, 64, 3, 11, 11);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv1_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv2.bin",
-					  float_type, 192, 64, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv2_bias.bin",
-					float_type, 1, 192, 1, 1);
-
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv3.bin",
-					  float_type, 384, 192, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv3_bias.bin",
-					float_type, 1, 384, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv4.bin",
-					  float_type, 256, 384, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv4_bias.bin",
-					float_type, 1, 256, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv5.bin",
-					  float_type, 256, 256, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv5_bias.bin",
-					float_type, 1, 256, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet_cifar10/fc1.bin",
-					 float_type, 1, 1, 4096, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
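-      // Handshake with the OpenTuner driver: block on the named pipe until a run is requested or "stop_run" arrives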
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      memset(str, 0, sizeof(str)); // Zero-fill so strcmp sees a null-terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolution(input, conv1_filter, 5, 5, 1, 1,
-				       conv_mode, conv_precision);
-
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* conv1_tanh = tensorTanh(conv1out);
-
-    void* pool1out = tensorPooling(conv1_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // 2nd Layer
-    void* conv2out = tensorConvolution(pool1out, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* conv2_tanh = tensorTanh(conv2out);
-
-    void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-      
-
-    // 3rd Layer
-    void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv3out, conv3_bias); // NOTE: In place operation
-  
-    void* conv3_tanh = tensorTanh(conv3out);
-
-    // 4th Layer
-    void* conv4out = tensorConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv4out, conv4_bias); // NOTE: In place operation
-  
-    void* conv4_tanh = tensorTanh(conv4out);
-    
-    // 5th Layer
-    void* conv5out = tensorConvolution(conv4_tanh, conv5_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv5out, conv5_bias); // NOTE: In place operation
-  
-    void* conv5_tanh = tensorTanh(conv5out);
-
-    void* pool5out = tensorPooling(conv5_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // final FC Layer
-    void* gemm1out = tensorGemmGPU(pool5out, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* result = tensorSoftmax(gemm1biasout);
-
-    printTensorDims(result);
-    
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, 80);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_approx.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_approx.cc
deleted file mode 100644
index 7713b8105ac0f9bc6f1dae6899548599e5ede0ce..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_approx.cc
+++ /dev/null
@@ -1,196 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  int total_runs = 100;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* CIFAR-10 DNN ********** \n");
-  // FIXIT: Process inputs in multiple batches - currently a single batch of 1000 images
-
-  int test_batch_size = 1000; //5000
-
-  //uint8_t* labels = readLabels("../model_params/cifar_keras/labels.bin", test_batch_size);
-  uint8_t* labels = readLabels("../model_params/alexnet_cifar10/test_labels.bin", test_batch_size);
-    
-  void* input = readTrainedWeights("../model_params/alexnet_cifar10/norm_cifar_input.bin",
-			  	   float_type,
-				   test_batch_size, 3, 32, 32);
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv1.bin",
-					  float_type, 64, 3, 11, 11);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv1_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv2.bin",
-					  float_type, 192, 64, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv2_bias.bin",
-					float_type, 1, 192, 1, 1);
-
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv3.bin",
-					  float_type, 384, 192, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv3_bias.bin",
-					float_type, 1, 384, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv4.bin",
-					  float_type, 256, 384, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv4_bias.bin",
-					float_type, 1, 256, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv5.bin",
-					  float_type, 256, 256, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv5_bias.bin",
-					float_type, 1, 256, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet_cifar10/fc1.bin",
-					 float_type, 1, 1, 4096, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      memset(str, 0, sizeof(str)); // Zero-fill so strcmp sees a null-terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
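-    // NOTE: the two extra trailing args to tensorConvPerf appear to control row/col perforation (0, 0 = no perforation)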
-    void* conv1out = tensorConvPerf(input, conv1_filter, 5, 5, 1, 1,
-				    conv_mode, conv_precision, 0, 0);
-
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* conv1_tanh = tensorTanh(conv1out);
-
-    void* pool1out = tensorPooling(conv1_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // 2nd Layer
-    void* conv2out = tensorConvPerf(pool1out, conv2_filter, 2, 2, 1, 1,
-				    conv_mode, conv_precision, 0, 0);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* conv2_tanh = tensorTanh(conv2out);
-
-    void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-      
-
-    // 3rd Layer
-    void* conv3out = tensorConvPerf(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision, 0, 0);
-    tensorAdd(conv3out, conv3_bias); // NOTE: In place operation
-  
-    void* conv3_tanh = tensorTanh(conv3out);
-
-    // 4th Layer
-    void* conv4out = tensorConvPerf(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-					     conv_mode, conv_precision, 2, 0);
-    tensorAdd(conv4out, conv4_bias); // NOTE: In place operation
-  
-    void* conv4_tanh = tensorTanh(conv4out);
-    
-    // 5th Layer
-    void* conv5out = tensorConvPerf(conv4_tanh, conv5_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision, 0, 0);
-    tensorAdd(conv5out, conv5_bias); // NOTE: In place operation
-  
-    void* conv5_tanh = tensorTanh(conv5out);
-
-    void* pool5out = tensorPooling(conv5_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // final FC Layer
-    void* gemm1out = tensorGemmGPU(pool5out, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* result = tensorSoftmax(gemm1biasout);
-
-    printTensorDims(result);
-    
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, 80);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_cpu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_cpu.cc
deleted file mode 100644
index b64d52678238825fe6e6368d1d15f7958c3759aa..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_cpu.cc
+++ /dev/null
@@ -1,181 +0,0 @@
-
-
-#include <fcntl.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_cpu_runtime.h"
-#include "../include/utils_cpu.h"
-#include "../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  int total_runs = 100;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* CIFAR-10 DNN ********** \n");
-  // FIXIT: Process inputs in multiple batches - currently a single batch of 100 images
-
-  int test_batch_size = 100;
-
-  //uint8_t* labels = readLabels("../model_params/cifar_keras/labels.bin", test_batch_size);
-  uint8_t* labels = readLabels("./model_params/alexnet_cifar10/test_labels.bin", test_batch_size);
-    
-  void* input = readTrainedWeightsCPU("./model_params/alexnet_cifar10/norm_cifar_input.bin",
-			  	   float_type,
-				   test_batch_size, 3, 32, 32);
-
-  void* conv1_filter = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv1.bin",
-					  float_type, 64, 3, 11, 11);  
-  void* conv1_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv1_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv2_filter = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv2.bin",
-					  float_type, 192, 64, 5, 5);  
-  void* conv2_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv2_bias.bin",
-					float_type, 1, 192, 1, 1);
-
-  void* conv3_filter = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv3.bin",
-					  float_type, 384, 192, 3, 3);  
-  void* conv3_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv3_bias.bin",
-					float_type, 1, 384, 1, 1);  
-  void* conv4_filter = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv4.bin",
-					  float_type, 256, 384, 3, 3);  
-  void* conv4_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv4_bias.bin",
-					float_type, 1, 256, 1, 1);
-  void* conv5_filter = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv5.bin",
-					  float_type, 256, 256, 3, 3);  
-  void* conv5_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/conv5_bias.bin",
-					float_type, 1, 256, 1, 1);
-  
-  void* fc1_weights = readTrainedWeightsCPU("./model_params/alexnet_cifar10/fc1.bin",
-					 float_type, 1, 1, 4096, 10);  
-  void* fc1_bias = readTrainedWeightsCPU("./model_params/alexnet_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      memset(str, 0, sizeof(str)); // Zero-fill so strcmp sees a null-terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolutionCPU(input, conv1_filter, 5, 5, 1, 1,
-				       conv_mode, conv_precision);
-
-    tensorAddCPU(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* conv1_tanh = tensorTanhCPU(conv1out);
-
-    void* pool1out = tensorPoolingCPU(conv1_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // 2nd Layer
-    void* conv2out = tensorConvolutionCPU(pool1out, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAddCPU(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* conv2_tanh = tensorTanhCPU(conv2out);
-
-    void* pool2out = tensorPoolingCPU(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-      
-
-    // 3rd Layer
-    void* conv3out = tensorConvolutionCPU(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAddCPU(conv3out, conv3_bias); // NOTE: In place operation
-  
-    void* conv3_tanh = tensorTanhCPU(conv3out);
-
-    // 4th Layer
-    void* conv4out = tensorConvolutionCPU(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAddCPU(conv4out, conv4_bias); // NOTE: In place operation
-  
-    void* conv4_tanh = tensorTanhCPU(conv4out);
-    
-    // 5th Layer
-    void* conv5out = tensorConvolutionCPU(conv4_tanh, conv5_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAddCPU(conv5out, conv5_bias); // NOTE: In place operation
-  
-    void* conv5_tanh = tensorTanhCPU(conv5out);
-
-    void* pool5out = tensorPoolingCPU(conv5_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-    // final FC Layer
-    void* gemm1out = tensorGemmCPU(pool5out, fc1_weights);  
-
-    void* gemm1biasout = tensorAddCPU(gemm1out, fc1_bias);
-
-    void* result = tensorSoftmaxCPU(gemm1biasout);
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, 80);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_front.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_front.cc
deleted file mode 100644
index 84510c5342811eb20c8c7e834f4fcf34d5561ccb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_front.cc
+++ /dev/null
@@ -1,104 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/alexnet_cifar10_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2500;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);    
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorTanh(var_1); 
-    void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-    void* var_5 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-    void* var_6 = tensorAdd(var_5, conv2d_2_b); 
-    void* var_7 = tensorTanh(var_6); 
-    void* var_8 = tensorPooling(var_7,0,2,2,0,0,2,2); 
-    void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorTanh(var_11); 
-    void* var_13 = tensorConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_14 = tensorAdd(var_13, conv2d_4_b); 
-    void* var_15 = tensorTanh(var_14); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorTanh(var_17); 
-    void* var_19 = tensorPooling(var_18,0,2,2,0,0,2,2); 
-    void* var_22 = tensorGemmGPU(var_19, dense_1_w); 
-    void* var_23 = tensorAdd(var_22, dense_1_b); 
-    void* var_24 = tensorSoftmax(var_23); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_24); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
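
The deleted driver above follows the batching pattern shared by all of these sources: slice the test set into fixed-size batches, run the forward pass per batch, accumulate per-batch accuracy, and let the memory tracker reclaim intermediates. A condensed sketch of that skeleton, reusing the runtime's batch helpers as called above (runForwardPass is a hypothetical stand-in for the per-batch tensor-op pipeline):

```cpp
#include <cstdint>
#include <string>
#include "../../tensor_runtime/include/tensor_runtime.h"

// Hypothetical stand-in for the tensor-op pipeline run on each batch.
void* runForwardPass(void* input);

float evaluateInBatches(const std::string& input_path,
                        const std::string& labels_path,
                        int test_input_size, int batch_size) {
  startMemTracking();
  int batch_count = test_input_size / batch_size; // assumes exact division
  float total_accuracy = 0.0f;
  for (int i = 0; i < batch_count; i++) {
    int start = i * batch_size, end = start + batch_size;
    void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
    void* output = runForwardPass(input);
    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end);
    total_accuracy += computeAccuracy2(labels, batch_size, output);
    freeBatchMemory(); // frees every tensor allocated since the batch began
  }
  return total_accuracy / batch_count;
}
```
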
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_tuner.cc
deleted file mode 100644
index d45cfa9ef3294c4c588b3abb98100dd8391529b7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/alexnet_cifar10_tuner.cc
+++ /dev/null
@@ -1,123 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 1;
-
-  if (argc > 1){
-    printf("argv[1] = %s \n", argv[1]);
-    total_runs = atoi(argv[1]);
-    printf("total_runs  %d \n", total_runs);
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
- 
- 
-  std::string dir_prefix = std::string("../model_params/alexnet_cifar10_front/"); 
-  //std::string input_path =  dir_prefix + std::string("alexnet_calib.bin");
-  //std::string labels_path =  dir_prefix + std::string("alexnet_train_labels.bin"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 500;
-  int batch_size = 500;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size;
-
-  
-  for(int j = 0; j < total_runs; j++){
-
-    // NOTE: Starting time profiling - paired with the stopProfiling() call at the end of each run
-    startProfiling();
-
-    float final_accuracy = 0.0;
-    for(int i = 0; i < batch_count; i++){
-
-      int start = (i * batch_size) + offset;
-      int end = (i + 1) * batch_size + offset;
-      void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);    
-
-      void* var_0 = tensorConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-      void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-      void* var_2 = tensorTanh(var_1); 
-      void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-      void* var_5 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-      void* var_6 = tensorAdd(var_5, conv2d_2_b); 
-      void* var_7 = tensorTanh(var_6); 
-      void* var_8 = tensorPooling(var_7,0,2,2,0,0,2,2); 
-      void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-      void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-      void* var_12 = tensorTanh(var_11); 
-      void* var_13 = tensorConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-      void* var_14 = tensorAdd(var_13, conv2d_4_b); 
-      void* var_15 = tensorTanh(var_14); 
-      void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-      void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-      void* var_18 = tensorTanh(var_17); 
-      void* var_19 = tensorPooling(var_18,0,2,2,0,0,2,2); 
-      void* var_22 = tensorGemmGPU(var_19, dense_1_w); 
-      void* var_23 = tensorAdd(var_22, dense_1_b); 
-      void* var_24 = tensorSoftmax(var_23); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels,batch_size,var_24); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-
-    stopProfiling();
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy);
-
-  }
-
-  dumpExecutionAccuracies();
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/cifar_keras.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/cifar_keras.cc
deleted file mode 100644
index c746e5de6116f701df7370f93969d40486e04e90..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/cifar_keras.cc
+++ /dev/null
@@ -1,203 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* CIFAR-10 DNN ********** \n");
-  // FIXIT: Stream the test set in batches - currently one fixed batch of test_batch_size images
-
-  int test_batch_size = 1000;
-
-  uint8_t* labels = readLabels("../model_params/cifar_keras/labels.bin", test_batch_size);
-  
-  void* input = readTrainedWeights("../model_params/cifar_keras/input.bin",
-			  	   float_type,
-				   test_batch_size, 3, 32, 32);
-
-  void* conv1_filter = readTrainedWeights("../model_params/cifar_keras/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/cifar_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/cifar_keras/conv2.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/cifar_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);
-
-  void* conv3_filter = readTrainedWeights("../model_params/cifar_keras/conv3.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/cifar_keras/conv3_bias.bin",
-					float_type, 1, 128, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/cifar_keras/conv4.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/cifar_keras/conv4_bias.bin",
-					float_type, 1, 128, 1, 1);
-
-  
-  void* fc1_weights = readTrainedWeights("../model_params/cifar_keras/fc1.bin",
-					 float_type, 1, 1, 2048, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/cifar_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/cifar_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/cifar_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolution(input, conv1_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* conv1_tanh = tensorTanh(conv1out);
-
-
-    // 2nd Layer
-    void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* conv2_tanh = tensorTanh(conv2out);
-
-    void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-      
-
-    // 3rd Layer
-    void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv3out, conv3_bias); // NOTE: In place operation
-  
-    void* conv3_tanh = tensorTanh(conv3out);
-
-    void* pool3out = tensorPooling(conv3_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-
-    // 4th Layer
-    void* conv4out = tensorConvolution(pool3out, conv4_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv4out, conv4_bias); // NOTE: In place operation
-  
-    void* conv4_tanh = tensorTanh(conv4out);
-
-    void* pool4out = tensorPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-
-    printTensorDims(pool4out);
-    
- 
-    void* gemm1out = tensorGemmGPU(pool4out, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorTanh(gemm1biasout);
-  
-    void* gemm2out = tensorGemmGPU(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    printTensorDims(result);
-    
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
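
The 'SAME' convolutions deleted above rely on standard convolution arithmetic: with a 3x3 kernel, padding 1 and stride 1 the spatial size is preserved, and each 2x2/stride-2 pool halves it. A quick check of the shapes feeding fc1 (a sketch; this helper is not part of the runtime):

```cpp
// out = (in + 2*pad - kernel) / stride + 1 (cuDNN-style convolution arithmetic)
int convOutDim(int in, int pad, int kernel, int stride) {
  return (in + 2 * pad - kernel) / stride + 1;
}
// convOutDim(32, 1, 3, 1) == 32, so each conv preserves 32x32; the three
// 2x2/2 pools shrink it 32 -> 16 -> 8 -> 4. With conv4's 128 output channels
// that gives 128 * 4 * 4 = 2048 inputs, matching fc1's (2048, 1024) weights.
```
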
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise.cc
deleted file mode 100644
index 00e259079058f1be5163bd43d9982e07b82f1001..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise.cc
+++ /dev/null
@@ -1,84 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/depthwise_test_8/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string depthwise_conv2d_1_b_path =  dir_prefix + std::string("depthwise_conv2d_1_b.bin"); 
-  void* depthwise_conv2d_1_b =  readTrainedWeights(depthwise_conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,6272,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 10000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 1); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-    void* var_4 = tensorConvolution(var_3, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorAdd(var_4, depthwise_conv2d_1_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_8 = tensorGemmGPU(var_6, dense_1_w); 
-    void* var_9 = tensorAdd(var_8, dense_1_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_11 = tensorGemmGPU(var_10, dense_2_w); 
-    void* var_12 = tensorAdd(var_11, dense_2_b); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorSoftmax(var_13); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_14); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
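
In the file above the final tensorConvolution argument is 32 rather than the 0 used elsewhere, and the filter shape (32,1,3,3) holds one 3x3 kernel per channel - evidently a grouped convolution with group count equal to the channel count, i.e. a depthwise convolution (the dense layer's 6272 = 32 * 14 * 14 input size is consistent with this reading). A naive single-image, CHW reference for the depthwise case, as a sketch:

```cpp
#include <vector>

// Depthwise convolution: each input channel c is filtered only by its own
// K x K kernel. Stride 1; same-size output when pad == (K - 1) / 2.
void depthwiseConv(const std::vector<float>& in, int C, int H, int W,
                   const std::vector<float>& filt, int K, int pad,
                   std::vector<float>& out) {
  out.assign(in.size(), 0.0f);
  for (int c = 0; c < C; c++)
    for (int y = 0; y < H; y++)
      for (int x = 0; x < W; x++) {
        float acc = 0.0f;
        for (int ky = 0; ky < K; ky++)
          for (int kx = 0; kx < K; kx++) {
            int iy = y + ky - pad, ix = x + kx - pad;
            if (iy < 0 || iy >= H || ix < 0 || ix >= W) continue;
            acc += in[(c * H + iy) * W + ix] * filt[(c * K + ky) * K + kx];
          }
        out[(c * H + y) * W + x] = acc;
      }
}
```
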
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise_batchnorm.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise_batchnorm.cc
deleted file mode 100644
index 84710565de3b2fdde6eca5d84c9e3f324eba1d50..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/depthwise_batchnorm.cc
+++ /dev/null
@@ -1,102 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/depthwise_batchnorm2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string depthwise_conv2d_1_b_path =  dir_prefix + std::string("depthwise_conv2d_1_b.bin"); 
-  void* depthwise_conv2d_1_b =  readTrainedWeights(depthwise_conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,6272,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 10000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 1); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_3 = tensorBatchNorm(var_2,batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_4 = tensorPooling(var_3,0,2,2,0,0,2,2); 
-    void* var_5 = tensorConvolution(var_4, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_6 = tensorAdd(var_5, depthwise_conv2d_1_b); 
-    void* var_7 = tensorRelu(var_6); 
-    void* var_8 = tensorBatchNorm(var_7,batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_10 = tensorGemmGPU(var_8, dense_1_w); 
-    void* var_11 = tensorAdd(var_10, dense_1_b); 
-    void* var_12 = tensorRelu(var_11); 
-    void* var_13 = tensorGemmGPU(var_12, dense_2_w); 
-    void* var_14 = tensorAdd(var_13, dense_2_b); 
-    void* var_15 = tensorRelu(var_14); 
-    void* var_16 = tensorSoftmax(var_15); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_16); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
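
The tensorBatchNorm calls above apply inference-time batch normalization: a per-channel affine transform using the stored statistics, y = gamma * (x - mean) / sqrt(variance + eps) + beta, with eps = 0.001 as passed in. A plain-C++ NCHW sketch:

```cpp
#include <cmath>

// Inference-time batch norm over an NCHW buffer, in place. HW is H * W.
void batchNormInference(float* data, int N, int C, int HW,
                        const float* gamma, const float* beta,
                        const float* mean, const float* variance,
                        float eps = 0.001f) {
  for (int n = 0; n < N; n++)
    for (int c = 0; c < C; c++) {
      float scale = gamma[c] / std::sqrt(variance[c] + eps);
      float shift = beta[c] - scale * mean[c];
      float* p = data + (n * C + c) * HW;
      for (int i = 0; i < HW; i++)
        p[i] = p[i] * scale + shift;
    }
}
```
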
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_clipped.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_clipped.cc
deleted file mode 100644
index 575f9b164f865afe268a4692ee6c4fd88b6a45c6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_clipped.cc
+++ /dev/null
@@ -1,132 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-bool Opentuner_run = false;
-
-void test_2_Layer_clipped_FC(){
-
-  int total_runs = 10;
-
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* 2-Layer FC with clipped activations and weights ********* \n");
-  // FIXIT: Stream the test set in batches - currently one fixed batch of test_batch_size images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc2_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc2_clipped/fc1_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc2_clipped/fc2.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc2_clipped/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);
-
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start execution profiling Tensor ops
-    startProfiling();
-    
-    // Layer-1
-    void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  
-    void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  
-    void* fc1_relu = tensorRelu2(fc1_bias_out, 0, 2);
-  
-    // Layer-2
-    void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  
-    void* fc2_relu = tensorRelu2(fc2_bias_out, 0, 2);
-  
-    void* result = tensorSoftmax(fc2_relu);
-
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();
-   
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }    
-  }
-  
-}
-
-
-// If an argument is passed - the run goes into OpenTuner mode - waiting on a pipe
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_2_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
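
tensorRelu2(t, 0, 2), used throughout this file, clamps activations into [0, 2] - a clipped ReLU, which keeps the dynamic range bounded and is presumably what makes these 'clipped' networks friendlier to reduced precision (an inference from the naming, not stated in the sources). Element-wise it is simply:

```cpp
// Clipped ReLU: the two extra arguments are the lower and upper clamp bounds.
inline float relu2(float x, float lo = 0.0f, float hi = 2.0f) {
  return x < lo ? lo : (x > hi ? hi : x);
}
```
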
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_cpu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_cpu.cc
deleted file mode 100644
index 41343afa0484d022758ee690e69f38221c9ece10..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc2_cpu.cc
+++ /dev/null
@@ -1,66 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../tensor_runtime/include/tensor_cpu_runtime.h"
-#include "../include/utils_cpu.h"
-#include "../include/types.h"
-
-
-void FC2(){
-
-  printf("********* 2-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 100;
-
-  uint8_t* labels = readLabels("./model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeightsCPU("./model_params/FC_network2/mnist_float_input.bin",
-				      float_type, test_batch_size, 1, 28, 28);
-
-  void* fc1_weights = readTrainedWeightsCPU("./model_params/fc2_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeightsCPU("./model_params/fc2_clipped/fc1_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeightsCPU("./model_params/fc2_clipped/fc2.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeightsCPU("./model_params/fc2_clipped/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);
-
-
-  // Layer-1
-  void* fc1out = tensorGemmCPU(input, fc1_weights);  
-  
-  void* fc1_bias_out = tensorAddCPU(fc1out, fc1_bias);
-  
-  void* fc1_relu = tensorRelu2CPU(fc1_bias_out, 0, 2);
-  
-  // Layer-2
-  void* fc2out = tensorGemmCPU(fc1_relu, fc2_weights);  
-  
-  void* fc2_bias_out = tensorAddCPU(fc2out, fc2_bias);
-  
-  void* fc2_relu = tensorRelu2CPU(fc2_bias_out, 0, 2);
-  
-  void* result = tensorSoftmaxCPU(fc2_relu);
-
-  computeAccuracy2(labels, test_batch_size, result);
-    
-}
-
-
-// CPU-only variant: runs the 2-layer FC network once (argc/argv are unused)
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  FC2();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
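
tensorGemmCPU followed by tensorAddCPU computes a dense layer, Y = X * W + b, where X is the (batch, in) flattening of the input tensor and W was loaded with shape (in, out) - e.g. (784, 128) for fc1 above. A naive reference, as a sketch:

```cpp
// Dense layer forward pass: Y[n][o] = b[o] + sum_i X[n][i] * W[i][o].
void denseForward(const float* X, const float* W, const float* b,
                  float* Y, int batch, int in, int out) {
  for (int n = 0; n < batch; n++)
    for (int o = 0; o < out; o++) {
      float acc = b[o];
      for (int i = 0; i < in; i++)
        acc += X[n * in + i] * W[i * out + o];
      Y[n * out + o] = acc;
    }
}
```
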
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc3_clipped.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc3_clipped.cc
deleted file mode 100644
index f566fd98a717698966c258377c32eda9ee30739d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc3_clipped.cc
+++ /dev/null
@@ -1,151 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-
-
-bool Opentuner_run = false;
-
-
-void test_3_Layer_clipped_FC(){
-
-
-  int total_runs = 10000;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc3_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 256);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc3_clipped/fc1_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc3_clipped/fc2.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc3_clipped/fc2_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/fc3_clipped/fc3.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc3_bias = readTrainedWeights("../model_params/fc3_clipped/fc3_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-    // Start execution profiling Tensor ops
-    startProfiling();
-
-    
-    // Layer-1
-    void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  
-    void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
- 
-    void* fc1_relu = tensorRelu2(fc1_bias_out, 0, 2);
- 
-    // Layer-2
-    void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
- 
-    void* fc2_relu = tensorRelu2(fc2_bias_out, 0, 2);
- 
-    // Layer-3
-    void* fc3out = tensorGemmGPU(fc2_relu, fc3_weights);  
-  
-    void* fc3_bias_out = tensorAdd(fc3out, fc3_bias);
- 
-    void* fc3_relu = tensorRelu2(fc3_bias_out, 0, 2);
-  
-    void* result = tensorSoftmax(fc3_relu);
-
-    
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();
-   
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-
-  }
-  
-  
-}
-
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_3_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
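
tensorSoftmax turns each row of logits into a probability distribution, softmax(z)_k = exp(z_k - max z) / sum_j exp(z_j - max z); subtracting the row maximum is the usual numerical-stability trick (an implementation detail assumed here, not confirmed by these sources). Per-row reference:

```cpp
#include <algorithm>
#include <cmath>

// Numerically stable softmax over one row of n logits, in place.
void softmaxRow(float* z, int n) {
  float m = *std::max_element(z, z + n);
  float sum = 0.0f;
  for (int i = 0; i < n; i++) { z[i] = std::exp(z[i] - m); sum += z[i]; }
  for (int i = 0; i < n; i++) z[i] /= sum;
}
```
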
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_clipped.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_clipped.cc
deleted file mode 100644
index 24a4d888124c43e98c7b78a33c3b5eb29250808d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_clipped.cc
+++ /dev/null
@@ -1,156 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_4_Layer_clipped_FC(){
-
-  int total_runs = 200;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-  
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc4_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 512);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc4_clipped/fc1_bias.bin",
-				      float_type, 1, 512, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc4_clipped/fc2.bin",
-					 float_type, 1, 1, 512, 256);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc4_clipped/fc2_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/fc4_clipped/fc3.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc3_bias = readTrainedWeights("../model_params/fc4_clipped/fc3_bias.bin",
-				      float_type, 1, 128, 1, 1);
-  void* fc4_weights = readTrainedWeights("../model_params/fc4_clipped/fc4.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc4_bias = readTrainedWeights("../model_params/fc4_clipped/fc4_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start execution profiling Tensor ops
-    startProfiling();
-  
-    // Layer-1
-    void* fc1out = tensorGemmGPU(input, fc1_weights);  
-
-    void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-    
-    void* fc1_relu = tensorRelu2(fc1_bias_out, 0, 2);
-    
-    // Layer-2
-    void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  
-    void* fc2_relu = tensorRelu2(fc2_bias_out, 0, 2);
-  
-    // Layer-3
-    void* fc3out = tensorGemmGPU(fc2_relu, fc3_weights);  
-  
-    void* fc3_bias_out = tensorAdd(fc3out, fc3_bias);
-  
-    void* fc3_relu = tensorRelu2(fc3_bias_out, 0, 2);
-  
-    // Layer-4
-    void* fc4out = tensorGemmGPU(fc3_relu, fc4_weights);  
-  
-    void* fc4_bias_out = tensorAdd(fc4out, fc4_bias);
-  
-    void* fc4_relu = tensorRelu2(fc4_bias_out, 0, 2); 
-  
-    void* result = tensorSoftmax(fc4_relu);
-
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-  }
-
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_4_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_cpu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_cpu.cc
deleted file mode 100644
index b7f6e1eb5256ebd0dbcf718d3e8e30f0d93ecbc5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/fc4_cpu.cc
+++ /dev/null
@@ -1,143 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../tensor_runtime/include/tensor_cpu_runtime.h"
-#include "../include/utils_cpu.h"
-#include "../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_4_Layer_clipped_FC(){
-
-  int total_runs = 200;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 500;
-  
-  uint8_t* labels = readLabels("./model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeightsCPU("./model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeightsCPU("./model_params/fc4_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 512);  
-  void* fc1_bias = readTrainedWeightsCPU("./model_params/fc4_clipped/fc1_bias.bin",
-				      float_type, 1, 512, 1, 1);  
-  void* fc2_weights = readTrainedWeightsCPU("./model_params/fc4_clipped/fc2.bin",
-					 float_type, 1, 1, 512, 256);  
-  void* fc2_bias = readTrainedWeightsCPU("./model_params/fc4_clipped/fc2_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc3_weights = readTrainedWeightsCPU("./model_params/fc4_clipped/fc3.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc3_bias = readTrainedWeightsCPU("./model_params/fc4_clipped/fc3_bias.bin",
-				      float_type, 1, 128, 1, 1);
-  void* fc4_weights = readTrainedWeightsCPU("./model_params/fc4_clipped/fc4.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc4_bias = readTrainedWeightsCPU("./model_params/fc4_clipped/fc4_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-
-  
-    // Layer-1
-    void* fc1out = tensorGemmCPU(input, fc1_weights);  
-	
-    void* fc1_bias_out = tensorAddCPU(fc1out, fc1_bias);
-    
-    void* fc1_relu = tensorRelu2CPU(fc1_bias_out, 0, 2);
-    
-    // Layer-2
-    void* fc2out = tensorGemmCPU(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorAddCPU(fc2out, fc2_bias);
-  
-    void* fc2_relu = tensorRelu2CPU(fc2_bias_out, 0, 2);
-    
-    // Layer-3
-    void* fc3out = tensorGemmCPU(fc2_relu, fc3_weights);  
-  
-    void* fc3_bias_out = tensorAddCPU(fc3out, fc3_bias);
-  
-    void* fc3_relu = tensorRelu2CPU(fc3_bias_out, 0, 2);
-  
-    // Layer-4
-    void* fc4out = tensorGemmCPU(fc3_relu, fc4_weights);  
-  
-    void* fc4_bias_out = tensorAddCPU(fc4out, fc4_bias);
-  
-    void* fc4_relu = tensorRelu2CPU(fc4_bias_out, 0, 2);
-  
-    void* result = tensorSoftmaxCPU(fc4_relu);
-	
-    computeAccuracy2(labels, test_batch_size, result);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-  }
-
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_4_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet2_cifar10_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet2_cifar10_half.cc
deleted file mode 100644
index 0c0c6aaa6291bcd67f3e3bff1eb7b2481bf72f1e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet2_cifar10_half.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: precision flag passed through to the conv calls; this file runs half-precision tensor ops. FIXIT: use enum
-
-
-  startMemTracking();
-
-  int test_input_size = 2000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* conv1out = tensorHalfConvolution(input, conv1_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv1out, conv1_bias); 
-    void* conv1_tanh = tensorHalfTanh(conv1out);
-    
-    // 2nd Layer
-    void* conv2out = tensorHalfConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv2out, conv2_bias); 
-    void* conv2_tanh = tensorHalfTanh(conv2out);
-    void* pool2out = tensorHalfPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-     
-    // 3rd Layer
-    void* conv3out = tensorHalfConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv3out, conv3_bias); 
-    void* conv3_tanh = tensorHalfTanh(conv3out);
-
-    // 4th Layer
-    void* conv4out = tensorHalfConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv4out, conv4_bias); 
-    void* conv4_tanh = tensorHalfTanh(conv4out);
-    void* pool4out = tensorHalfPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-    // 5th Layer
-    void* conv5out = tensorHalfConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv5out, conv5_bias); 
-    void* conv5_tanh = tensorHalfTanh(conv5out);
-
-    // 6th Layer
-    void* conv6out = tensorHalfConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv6out, conv6_bias); 
-  
-    void* conv6_tanh = tensorHalfTanh(conv6out);
-    void* pool6out = tensorHalfPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-    
-    // final FC Layer
-    void* gemm1out = tensorHalfGemmGPU(pool6out, fc1_weights);  
-    void* gemm1biasout = tensorHalfAdd(gemm1out, fc1_bias);
-    void* result = tensorSoftmax(gemm1biasout);
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, result); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet_cifar10_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet_cifar10_half.cc
deleted file mode 100644
index 6cd19407f4780de516cb777d40644ca3a7e1abc3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/alexnet_cifar10_half.cc
+++ /dev/null
@@ -1,104 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/alexnet_cifar10_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 2000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);    
-
-    void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-    void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorHalfTanh(var_1); 
-    void* var_3 = tensorHalfPooling(var_2,0,2,2,0,0,2,2); 
-    void* var_5 = tensorHalfConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-    void* var_6 = tensorHalfAdd(var_5, conv2d_2_b); 
-    void* var_7 = tensorHalfTanh(var_6); 
-    void* var_8 = tensorHalfPooling(var_7,0,2,2,0,0,2,2); 
-    void* var_10 = tensorHalfConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_11 = tensorHalfAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorHalfTanh(var_11); 
-    void* var_13 = tensorHalfConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_14 = tensorHalfAdd(var_13, conv2d_4_b); 
-    void* var_15 = tensorHalfTanh(var_14); 
-    void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorHalfTanh(var_17); 
-    void* var_19 = tensorHalfPooling(var_18,0,2,2,0,0,2,2); 
-    void* var_22 = tensorHalfGemmGPU(var_19, dense_1_w); 
-    void* var_23 = tensorHalfAdd(var_22, dense_1_b); 
-    void* var_24 = tensorSoftmax(var_23); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_24); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc2_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc2_half.cc
deleted file mode 100644
index 44c03aab875a6de4af6c87776241295cd1fd673b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc2_half.cc
+++ /dev/null
@@ -1,137 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_2_Layer_clipped_FC(){
-
-  int total_runs = 1;
-
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* 2-Layer FC with clipped activations and weights ********* \n");
-  // FIXIT: Stream the test set in batches - currently one fixed batch of test_batch_size images
-
-  int test_batch_size = 5000;
-
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc2_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc2_clipped/fc1_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc2_clipped/fc2.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc2_clipped/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);
-
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = { 0 }; // zero-init so the strcmp below always sees a terminated string
-      read(fd, str, 80);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start execution profiling Tensor ops
-    startProfiling();
-    
-    // Layer-1
-    void* fc1out = tensorHalfGemm(input, fc1_weights);  
-  
-    void* fc1_bias_out = tensorHalfAdd(fc1out, fc1_bias);
-  
-    void* fc1_relu = tensorHalfRelu2(fc1_bias_out, 0, 2);
-  
-    // Layer-2
-    void* fc2out = tensorHalfGemm(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorHalfAdd(fc2out, fc2_bias);
-  
-    void* fc2_relu = tensorHalfRelu2(fc2_bias_out, 0, 2);
-  
-    void* result = tensorSoftmax(fc2_relu);
-
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();
-   
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-  
-}
-
-
-// If an argument is passed - the run goes into OpenTuner mode - waiting on a pipe
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_2_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc3_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc3_half.cc
deleted file mode 100644
index 697fea9b8aa61a8c3cf5ec3e8d0d66466df9b1e8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc3_half.cc
+++ /dev/null
@@ -1,151 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-
-
-bool Opentuner_run = false;
-
-
-void test_3_Layer_clipped_FC(){
-
-
-  int total_runs = 1000;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc3_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 256);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc3_clipped/fc1_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc3_clipped/fc2.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc3_clipped/fc2_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/fc3_clipped/fc3.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc3_bias = readTrainedWeights("../model_params/fc3_clipped/fc3_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
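-  // Topology: 784 -> 256 -> 128 -> 10, matching the fc3_clipped parameter shapes above.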
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // Null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-    // Start execution profiling Tensor ops
-    startProfiling();
-
-    
-    // Layer-1
-    void* fc1out = tensorHalfGemm(input, fc1_weights);  
-  
-    void* fc1_bias_out = tensorHalfAdd(fc1out, fc1_bias);
- 
-    void* fc1_relu = tensorHalfRelu2(fc1_bias_out, 0, 2);
- 
-    // Layer-2
-    void* fc2out = tensorHalfGemm(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorHalfAdd(fc2out, fc2_bias);
- 
-    void* fc2_relu = tensorHalfRelu2(fc2_bias_out, 0, 2);
- 
-    // Layer-3
-    void* fc3out = tensorHalfGemm(fc2_relu, fc3_weights);  
-  
-    void* fc3_bias_out = tensorHalfAdd(fc3out, fc3_bias);
- 
-    void* fc3_relu = tensorHalfRelu2(fc3_bias_out, 0, 2);
-  
-    void* result = tensorSoftmax(fc3_relu);
-
-    
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();
-   
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // Writing 80 bytes would read past the end of the literal
-      close(fd_out);
-    }
-
-  }
-  
-  
-}
-
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_3_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc4_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc4_half.cc
deleted file mode 100644
index ad999165cfd4148479de58e24fed8291161da491..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/fc4_half.cc
+++ /dev/null
@@ -1,156 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_4_Layer_clipped_FC(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-  
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc4_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 512);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc4_clipped/fc1_bias.bin",
-				      float_type, 1, 512, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc4_clipped/fc2.bin",
-					 float_type, 1, 1, 512, 256);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc4_clipped/fc2_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/fc4_clipped/fc3.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc3_bias = readTrainedWeights("../model_params/fc4_clipped/fc3_bias.bin",
-				      float_type, 1, 128, 1, 1);
-  void* fc4_weights = readTrainedWeights("../model_params/fc4_clipped/fc4.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc4_bias = readTrainedWeights("../model_params/fc4_clipped/fc4_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
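-  // Topology: 784 -> 512 -> 256 -> 128 -> 10, matching the fc4_clipped parameter shapes above.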
-
-
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // Null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start execution profiling Tensor ops
-    startProfiling();
-  
-    // Layer-1
-    void* fc1out = tensorHalfGemm(input, fc1_weights);  
-
-    void* fc1_bias_out = tensorHalfAdd(fc1out, fc1_bias);
-    
-    void* fc1_relu = tensorHalfRelu2(fc1_bias_out, 0, 2);
-    
-    // Layer-2
-    void* fc2out = tensorHalfGemm(fc1_relu, fc2_weights);  
-  
-    void* fc2_bias_out = tensorHalfAdd(fc2out, fc2_bias);
-  
-    void* fc2_relu = tensorHalfRelu2(fc2_bias_out, 0, 2);
-  
-    // Layer-3
-    void* fc3out = tensorHalfGemm(fc2_relu, fc3_weights);  
-  
-    void* fc3_bias_out = tensorHalfAdd(fc3out, fc3_bias);
-  
-    void* fc3_relu = tensorHalfRelu2(fc3_bias_out, 0, 2);
-  
-    // Layer-4
-    void* fc4out = tensorHalfGemm(fc3_relu, fc4_weights);  
-  
-    void* fc4_bias_out = tensorHalfAdd(fc4out, fc4_bias);
-  
-    void* fc4_relu = tensorHalfRelu2(fc4_bias_out, 0, 2); 
-  
-    void* result = tensorSoftmax(fc4_relu);
-
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // Writing 80 bytes would read past the end of the literal
-      close(fd_out);
-    }
-  }
-
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  test_4_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_keras_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_keras_half.cc
deleted file mode 100644
index dd68f2b48eb66456061bb93decc1cbd985887be0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_keras_half.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to arbitrary batch sizes - currently fixed at 5000 images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have a batch-size dimension
-  // NOTE: The first two dims are output channels (configurable) and input channels (MUST match the input tensor's channels)
-  // IMP: The output channel counts match the trained model - not the LeNet architecture proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
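-  // fc1's 7*7*64 input features come from flattening the conv stack: the two
-  // 2x2 stride-2 poolings reduce 28x28 to 7x7, and conv2 outputs 64 channels.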
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // Null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
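-    // pad_h = pad_w = 2 keeps the 28x28 spatial size for the 5x5 kernels at
-    // stride 1 (assuming the first four args are pad_h, pad_w, stride_h, stride_w)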
-    void* conv1out = tensorHalfConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorHalfAdd(conv1out, conv1_bias); // NOTE: In place operation
-
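-    // Assumed tensorHalfPooling signature: (input, pool_type, win_h, win_w,
-    // pad_h, pad_w, stride_h, stride_w), with pool_type 0 being max pooling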
-    void* pool1out = tensorHalfPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorHalfTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorHalfConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorHalfPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorHalfTanh(pool2out);
-
-    void* gemm1out = tensorHalfGemm(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorHalfAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorHalfTanh(gemm1biasout);
-  
-    void* gemm2out = tensorHalfGemm(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorHalfAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorHalfTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // Writing 80 bytes would read past the end of the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_tanh_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_tanh_half.cc
deleted file mode 100644
index bb45b14d62e061e704b252aa44e602e0c1d08ba7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/lenet_tanh_half.cc
+++ /dev/null
@@ -1,173 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to arbitrary batch sizes - currently fixed at 5000 images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have a batch-size dimension
-  // NOTE: The first two dims are output channels (configurable) and input channels (MUST match the input tensor's channels)
-  // IMP: The output channel counts match the trained model - not the LeNet architecture proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_tanh2/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_tanh2/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_tanh2/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_tanh2/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_tanh2/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_tanh2/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_tanh2/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_tanh2/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // Null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorHalfConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorHalfAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorHalfPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorHalfTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorHalfConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorHalfPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorHalfTanh(pool2out);
-
-    void* gemm1out = tensorHalfGemm(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorHalfAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorHalfTanh(gemm1biasout);
-  
-    void* gemm2out = tensorHalfGemm(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorHalfAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorHalfTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // Writing 80 bytes would read past the end of the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_depthwise_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_depthwise_half.cc
deleted file mode 100644
index dabafd4345f29d00c7271c796a8497aba8b7772d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_depthwise_half.cc
+++ /dev/null
@@ -1,411 +0,0 @@
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-int main(){ 
-
-    llvm_hpvm_initTensorRt(0); 
-
-
-    std::string dir_prefix = std::string("../model_params/mobilenet/"); 
-    std::string input_path =  dir_prefix + std::string("input.bin"); 
-    std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-    std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-    void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-    std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-    void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-    void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-    void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-    void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-    std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-    void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-    std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-    void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-    void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-    void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-    void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-    std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-    void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-    std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-    void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-    void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-    void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-    void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-    std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-    void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-    std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-    void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-    void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-    void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-    void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-    std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-    void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-    std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-    void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-    void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-    void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-    void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-    std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-    void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-    std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-    void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-    void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-    void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-    void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-    void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-    std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-    void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-    void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-    void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-    void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-    std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-    void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-    std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-    void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-    void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-    void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-    void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-    void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-    std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-    void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-    void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-    void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-    void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-    std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-    void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-    std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-    void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-    void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-    void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-    void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-    void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-    std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-    void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-    void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-    void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-    void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-    std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-    void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-    std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-    void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-    void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-    void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-    void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-    void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-    std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-    void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-    void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-    void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-    void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-    void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-    void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-    void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-    void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-    void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-    void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-    std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-    void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-    void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-    void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-    void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-    void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-    void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-    void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-    void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-    void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-    void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-    std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-    void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-    void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-    void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-    void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-    void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-    void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-    void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-    void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-    void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-    void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-    std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-    void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-    void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-    void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-    void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-    void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-    void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-    void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-    void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-    void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-    void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-    std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-    void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-    void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-    void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-    void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-    void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-    void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-    void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-    void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-    void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-    void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-    std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-    void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-    void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-    void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-    void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-    std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-    void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-    std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-    void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-    void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-    void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-    void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-    void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-    std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-    void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-    void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-    void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-    void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-    std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-    void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-    std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-    void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-    void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-    void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-    void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-    std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-    void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-    std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-    void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-    void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-    void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-    std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-    void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-    std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-    void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-    std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-    void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
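-    // Batched inference: startMemTracking() plus the freeBatchMemory() call at
-    // the end of each iteration presumably release per-batch intermediate
-    // tensors so GPU memory stays bounded across batches.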
-    startMemTracking(); 
-
-    int test_input_size = 2000; 
-    int batch_size = 1000;  
-    int batch_count = test_input_size / batch_size; 
-
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
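-        // The 0.001 passed to each tensorHalfBatchNorm call below is the
-        // batch-norm epsilon (matching the Keras default).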
-        void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-        void* var_2 = tensorHalfRelu(var_1); 
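-        // In the tensorHalfConvCutlass calls the trailing argument (32, 64, ...)
-        // tracks the channel count and appears to be the conv group count,
-        // making these depthwise 3x3 convolutions.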
-        void* var_4 = tensorHalfConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-        void* var_5 = tensorHalfBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-        void* var_6 = tensorHalfRelu(var_5); 
-        void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-        void* var_8 = tensorHalfBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-        void* var_9 = tensorHalfRelu(var_8); 
-        void* var_11 = tensorHalfConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-        void* var_12 = tensorHalfBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-        void* var_13 = tensorHalfRelu(var_12); 
-        void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-        void* var_15 = tensorHalfBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-        void* var_16 = tensorHalfRelu(var_15); 
-        void* var_18 = tensorHalfConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-        void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-        void* var_20 = tensorHalfRelu(var_19); 
-        void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-        void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-        void* var_23 = tensorHalfRelu(var_22); 
-        void* var_26 = tensorHalfConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-        void* var_27 = tensorHalfBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-        void* var_28 = tensorHalfRelu(var_27); 
-        void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-        void* var_30 = tensorHalfBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-        void* var_31 = tensorHalfRelu(var_30); 
-        void* var_33 = tensorHalfConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-        void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-        void* var_35 = tensorHalfRelu(var_34); 
-        void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-        void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-        void* var_38 = tensorHalfRelu(var_37); 
-        void* var_41 = tensorHalfConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-        void* var_42 = tensorHalfBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-        void* var_43 = tensorHalfRelu(var_42); 
-        void* var_44 = tensorHalfConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-        void* var_45 = tensorHalfBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-        void* var_46 = tensorHalfRelu(var_45); 
-        void* var_48 = tensorHalfConvCutlass(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-        void* var_49 = tensorHalfBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-        void* var_50 = tensorHalfRelu(var_49); 
-        void* var_51 = tensorHalfConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-        void* var_52 = tensorHalfBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-        void* var_53 = tensorHalfRelu(var_52); 
-        void* var_55 = tensorHalfConvCutlass(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-        void* var_56 = tensorHalfBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-        void* var_57 = tensorHalfRelu(var_56); 
-        void* var_58 = tensorHalfConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-        void* var_59 = tensorHalfBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-        void* var_60 = tensorHalfRelu(var_59); 
-        void* var_63 = tensorHalfConvCutlass(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-        void* var_64 = tensorHalfBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-        void* var_65 = tensorHalfRelu(var_64); 
-        void* var_66 = tensorHalfConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-        void* var_67 = tensorHalfBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-        void* var_68 = tensorHalfRelu(var_67); 
-        void* var_70 = tensorHalfConvCutlass(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-        void* var_71 = tensorHalfBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-        void* var_72 = tensorHalfRelu(var_71); 
-        void* var_73 = tensorHalfConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-        void* var_74 = tensorHalfBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-        void* var_75 = tensorHalfRelu(var_74); 
-        void* var_77 = tensorHalfConvCutlass(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-        void* var_78 = tensorHalfBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-        void* var_79 = tensorHalfRelu(var_78); 
-        void* var_80 = tensorHalfConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-        void* var_81 = tensorHalfBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-        void* var_82 = tensorHalfRelu(var_81); 
-        void* var_85 = tensorHalfConvCutlass(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-        void* var_86 = tensorHalfBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-        void* var_87 = tensorHalfRelu(var_86); 
-        void* var_88 = tensorHalfConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-        void* var_89 = tensorHalfBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-        void* var_90 = tensorHalfRelu(var_89); 
-        void* var_92 = tensorHalfConvCutlass(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-        void* var_93 = tensorHalfBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-        void* var_94 = tensorHalfRelu(var_93); 
-        void* var_95 = tensorHalfConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-        void* var_96 = tensorHalfBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-        void* var_97 = tensorHalfRelu(var_96); 
-        void* var_99 = tensorHalfPooling(var_97,1,2,2,0,0,2,2); 
-        void* var_101 = tensorHalfGemmGPU(var_99, dense_1_w); 
-        void* var_102 = tensorHalfAdd(var_101, dense_1_b); 
-        void* var_103 = tensorSoftmax(var_102); 
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-    }
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-    llvm_hpvm_cleanupTensorRt(); 
-
-    return 0; 
-
-}
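The deleted MobileNet sources above (and the shallow variant that follows) repeat one depthwise-separable pattern: a 3x3 grouped convolution via tensorHalfConvCutlass (the trailing argument appears to be the group count, equal to the channel count), then batch norm and ReLU, then a 1x1 pointwise tensorHalfConvolution, batch norm, and ReLU. A minimal sketch of how that repetition could be factored, assuming the tensorHalf* entry points declared in tensor_runtime.h; BNParams and separableBlock are illustrative names, not part of these sources:

// Illustrative helper (not in the deleted sources): one depthwise-separable block.
struct BNParams { void *gamma, *beta, *mean, *variance; };

void* separableBlock(void* in, void* dw_filter, const BNParams& bn1,
                     void* pw_filter, const BNParams& bn2,
                     int stride, int channels) {
  // Depthwise 3x3 convolution: pad 1, given stride, one group per channel
  // (assumed meaning of the trailing argument, matching the calls above).
  void* t = tensorHalfConvCutlass(in, dw_filter, 1, 1, stride, stride, 1, channels);
  t = tensorHalfRelu(tensorHalfBatchNorm(t, bn1.gamma, bn1.beta, bn1.mean, bn1.variance, 0.001));
  // Pointwise 1x1 convolution: no padding, stride 1.
  t = tensorHalfConvolution(t, pw_filter, 0, 0, 1, 1, 1, 1);
  return tensorHalfRelu(tensorHalfBatchNorm(t, bn2.gamma, bn2.beta, bn2.mean, bn2.variance, 0.001));
}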
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_shallow_depthwise_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_shallow_depthwise_half.cc
deleted file mode 100644
index 6b0d02f01e8e3516d4b5d03899317760d3629388..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/mobilenet_shallow_depthwise_half.cc
+++ /dev/null
@@ -1,236 +0,0 @@
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-int main(int argc, char* argv[]){ 
-
-    llvm_hpvm_initTensorRt(0); 
-
-    //std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/");
-
-    std::string dir_prefix = std::string("../model_params/mobilenet_shallow/");
-
-    std::string input_path =  dir_prefix + std::string("input.bin"); 
-    std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-    std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-    void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-    std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-    void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-    void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-    void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-    void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-    std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-    void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-    std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-    void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-    void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-    void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-    std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-    void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-    std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-    void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-    std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-    void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-    void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-    void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-    void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-    std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-    void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-    std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-    void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-    void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-    void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-    std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-    void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-    std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-    void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-    std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-    void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-    void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-    void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-    void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-    std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-    void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-    std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-    void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-    void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-    void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-    void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-    void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-    std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-    void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-    void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-    void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-    void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-    std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-    void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-    std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-    void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-    void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-    void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-    std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-    void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-    void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-    std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-    void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-    void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-    void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-    void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-    std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-    void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-    std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-    void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-    void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-    void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-    void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-    void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-    std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-    void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-    void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-    void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-    void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-    std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-    void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-    std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-    void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-    void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-    void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-    std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-    void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-    void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-    std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-    void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-    void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-    void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-    std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-    void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-    std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-    void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-    std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-    void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-    startMemTracking(); 
-
-    int test_input_size = 2000; 
-    int batch_size = 1000; 
-    int batch_count = test_input_size / batch_size; 
-
-
-    float final_accuracy = 0.0;
-
-    for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-        void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-        void* var_2 = tensorHalfRelu(var_1); 
-        void* var_4 = tensorHalfConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-        void* var_5 = tensorHalfBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-        void* var_6 = tensorHalfRelu(var_5); 
-        void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-        void* var_8 = tensorHalfBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-        void* var_9 = tensorHalfRelu(var_8); 
-        void* var_11 = tensorHalfConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-        void* var_12 = tensorHalfBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-        void* var_13 = tensorHalfRelu(var_12); 
-        void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-        void* var_15 = tensorHalfBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-        void* var_16 = tensorHalfRelu(var_15); 
-        void* var_18 = tensorHalfConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-        void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-        void* var_20 = tensorHalfRelu(var_19); 
-        void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-        void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-        void* var_23 = tensorHalfRelu(var_22); 
-        void* var_26 = tensorHalfConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-        void* var_27 = tensorHalfBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-        void* var_28 = tensorHalfRelu(var_27); 
-        void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-        void* var_30 = tensorHalfBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-        void* var_31 = tensorHalfRelu(var_30); 
-        void* var_33 = tensorHalfConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-        void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-        void* var_35 = tensorHalfRelu(var_34); 
-        void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-        void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-        void* var_38 = tensorHalfRelu(var_37); 
-        void* var_41 = tensorHalfConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-        void* var_42 = tensorHalfBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-        void* var_43 = tensorHalfRelu(var_42); 
-        void* var_44 = tensorHalfConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-        void* var_45 = tensorHalfBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-        void* var_46 = tensorHalfRelu(var_45); 
-        void* var_47 = tensorHalfPooling(var_46,1,2,2,0,0,2,2); 
-        void* var_49 = tensorHalfGemmGPU(var_47, dense_1_w); 
-        void* var_50 = tensorHalfAdd(var_49, dense_1_b); 
-        void* var_51 = tensorSoftmax(var_50); 
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-
-    }
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy);
-
-    dumpExecutionAccuracies();
-
-    llvm_hpvm_cleanupTensorRt(); 
-
-    return 0; 
-
-}
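Each of these generated harnesses drives the same batched-evaluation loop: slice the test set into fixed-size [start, end) windows, run the forward pass, score the batch, and free per-batch tensors before averaging. A condensed sketch, assuming the readInputBatch/readLabelsBatch/computeAccuracy2/freeBatchMemory helpers used above; evaluate and runOneBatch are illustrative names:

#include <stdint.h>
#include <string>

// Illustrative condensation of the evaluation loop above; runOneBatch stands in
// for the inlined tensorHalf* network body.
float evaluate(const std::string& input_path, const std::string& labels_path,
               int test_input_size, int batch_size,
               void* (*runOneBatch)(void*)) {
  int batch_count = test_input_size / batch_size;
  float final_accuracy = 0.0;
  for (int i = 0; i < batch_count; i++) {
    int start = i * batch_size;
    int end = (i + 1) * batch_size;
    void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
    void* output = runOneBatch(input);                        // forward pass
    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end);
    final_accuracy += computeAccuracy2(labels, batch_size, output);
    freeBatchMemory();                                        // drop per-batch tensors
  }
  return final_accuracy / batch_count;                        // mean per-batch accuracy
}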
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEMO_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEMO_half.cc
deleted file mode 100644
index 23fd15576ace419976a2b4d7f8191079a59c8c31..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEMO_half.cc
+++ /dev/null
@@ -1,164 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  startProfiling();
-  
-  printf("********* Pipeline: Gaussian - Emboss - Motion Blur - Outline ********** \n");
-  // FIXIT: Extend this to batches of images - currently a single fixed-size calibration batch
-
-  //long int test_batch_size = 9145;
-  //long int test_batch_size = 4572;
-  long int test_batch_size = 2000;
-  
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-				   float_type,
-				   test_batch_size, 1, H, W);
-  
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEMO_calib.bin",
-					   float_type,
-					   test_batch_size, 1, H, W);
-
-  
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // ensure strcmp sees a terminated string
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorHalfConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorHalfRelu2(gaussian_out, 0, 255);
-
-    void* emboss_out = tensorHalfConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorHalfAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorHalfRelu2(emboss_bias_out, 0, 255);
-
-    void* motionblur_out = tensorHalfConvolution(emboss_bias_out_clip, motionblur_filter, 4, 4, 1, 1,
-                                       conv_mode, conv_precision);
-    void * motionblur_out_clip = tensorHalfRelu2(motionblur_out, 0, 255);
-
-    void* outline_out = tensorHalfConvolution(motionblur_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * result = tensorHalfRelu2(outline_out, 0, 255);
-
-
-    // NOTE-IMP: Always call this before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GEMO_calib.bin");
-    
-    
-    computePSNRViolation(result, golden_output, 30);
-
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // send only the token, not 80 bytes past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  stopProfiling(); 
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
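When Opentuner_run is set, each iteration above blocks on a named pipe for a go/stop token before executing and writes a completion token afterward. A hedged condensation of that handshake, reusing the sources' /tmp/myfifo path, tokens, and includes; waitForTuner and signalTuner are illustrative names:

// Illustrative condensation of the OpenTuner FIFO handshake above.
bool waitForTuner(const char* fifo) {
  int fd = open(fifo, O_RDONLY);                 // blocks until the tuner writes
  if (fcntl(fd, F_GETFD) == -1) { printf("Invalid descriptor \n"); abort(); }
  char str[100] = {0};
  read(fd, str, sizeof(str) - 1);                // token such as "stop_run"
  close(fd);
  return strcmp(str, "stop_run") != 0;           // false => tuner asked us to stop
}

void signalTuner(const char* fifo) {
  int fd = open(fifo, O_WRONLY);
  if (fcntl(fd, F_GETFD) == -1) { printf("Invalid descriptor \n"); abort(); }
  const char* msg = "completed***!\n";
  write(fd, msg, strlen(msg) + 1);               // completion token, terminator included
  close(fd);
}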
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEOM_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEOM_half.cc
deleted file mode 100644
index 07875553d59a3635c21db8975db9e8986d1bc6c9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEOM_half.cc
+++ /dev/null
@@ -1,101 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-
-  startProfiling();
-  
-  printf("********* Pipeline: Gaussian - Emboss - Outline - Motion Blur ********** \n");
-
-  long int test_batch_size = 2000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W); 
-  
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEOM_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  
-  for(int i = 0; i < total_runs; i++){
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorHalfConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorHalfRelu2(gaussian_out, 0, 255);
-
-    void* emboss_out = tensorHalfConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorHalfAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorHalfRelu2(emboss_bias_out, 0, 255);
-
-    void* outline_out = tensorHalfConvolution(emboss_bias_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * outline_out_clip = tensorHalfRelu2(outline_out, 0, 255);
-
-    void* motionblur_out = tensorHalfConvolution(outline_out_clip, motionblur_filter, 4, 4, 1, 1,
-                                       conv_mode, conv_precision);
-    void * result = tensorHalfRelu2(motionblur_out, 0, 255);
-
-
-    // NOTE-IMP: Always call this before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GEOM_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-    
-  }
-
-  stopProfiling();  
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
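The padding arguments in these pipelines follow the usual 'SAME' rule for odd filters at stride 1, pad = (k - 1) / 2, so each stage preserves the 240x300 image size: the 9x9 Gaussian and motion-blur filters get pad 4, the 5x5 emboss filter pad 2, and the 3x3 outline and sharpen filters pad 1. As a one-line check (illustrative helper, not in the sources):

// 'SAME' padding for an odd k x k filter at stride 1.
inline int samePad(int k) { return (k - 1) / 2; }  // samePad(9)==4, samePad(5)==2, samePad(3)==1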
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEO_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEO_half.cc
deleted file mode 100644
index 369d8ff4f76aaff4663532ee634a79d54b94b2aa..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GEO_half.cc
+++ /dev/null
@@ -1,93 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  
-  printf("********* Pipeline: Gaussian - Emboss - Outline ********** \n");
-
-  startProfiling();
-    
-  long int test_batch_size = 2000; 
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEO_calib.bin",
-					   float_type,
-					   test_batch_size, 1, H, W);
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  
-  for(int i = 0; i < total_runs; i++){
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorHalfConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorHalfRelu2(gaussian_out, 0, 255);
-
-    void* emboss_out = tensorHalfConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorHalfAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorHalfRelu2(emboss_bias_out, 0, 255);
-
-    void* outline_out = tensorHalfConvolution(emboss_bias_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * result = tensorHalfRelu2(outline_out, 0, 255);
-
-
-    // NOTE-IMP: Always call this before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GEO_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-    
-  }
-
-  stopProfiling();
-}
-
-
-int main(int argc, char* argv[]){
-  
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
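computePSNRViolation(result, golden_output, 30) scores each output image against the golden output and flags those whose PSNR falls below 30 dB. For images clipped to [0, 255] as above, PSNR is 10 * log10(255^2 / MSE); a per-image sketch under that assumption (the runtime's own implementation may differ in details):

#include <cmath>

// Illustrative per-image PSNR for an H x W float image with pixel range [0, 255].
double psnr(const float* out, const float* golden, long n) {
  double mse = 0.0;
  for (long i = 0; i < n; i++) {
    double d = (double)out[i] - (double)golden[i];
    mse += d * d;
  }
  mse /= (double)n;
  return 10.0 * std::log10((255.0 * 255.0) / mse);  // higher means closer to golden
}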
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSME_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSME_half.cc
deleted file mode 100644
index e2d2ff18090c085405ec94902696e1a6631d94a7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSME_half.cc
+++ /dev/null
@@ -1,96 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur - Emboss ********** \n");
-
-  startProfiling();
-  
-  //long int test_batch_size = 4572;
-  long int test_batch_size = 2000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-				   float_type,
-				   test_batch_size, 1, H, W);
-  
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  
-  for(int i = 0; i < total_runs; i++){
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorHalfConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * gaussian_out_clip = tensorHalfRelu2(gaussian_out, 0, 255);
-
-    void* sharpen_out = tensorHalfConvolution(gaussian_out_clip, sharpen_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    void * sharpen_out_clip = tensorHalfRelu2(sharpen_out, 0, 255);
-
-    void* motionblur_out = tensorHalfConvolution(sharpen_out_clip, motionblur_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * motionblur_out_clip = tensorHalfRelu2(motionblur_out, 0, 255);
-
-    void* emboss_out = tensorHalfConvolution(motionblur_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorHalfAdd(emboss_out, emboss_bias);
-    void* result = tensorHalfRelu2(emboss_bias_out, 0, 255);
-
-
-    //void* result = gaussian_out;
-
-    // NOTE-IMP: Always call this before dumpOutput and computePSNRViolation
-    //hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GSME_calib.bin");
-    
-    //computePSNRViolation(result, golden_output, 30);
-  }
-
-  stopProfiling();
-}
-
-
-int main(int argc, char* argv[]){
-
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSM_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSM_half.cc
deleted file mode 100644
index 915dc596252576fb39dca073793d618d21634509..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/pipeline_GSM_half.cc
+++ /dev/null
@@ -1,91 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur ********** \n");  
-  
-  // Start Profiling
-  startProfiling();
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 2000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  
-  for(int i = 0; i < total_runs; i++){
-
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorHalfConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * gaussian_out_clip = tensorHalfRelu2(gaussian_out, 0, 255);
-
-    void* sharpen_out = tensorHalfConvolution(gaussian_out_clip, sharpen_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    void * sharpen_out_clip = tensorHalfRelu2(sharpen_out, 0, 255);
-
-    void* motionblur_out = tensorHalfConvolution(sharpen_out_clip, motionblur_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * result = tensorHalfRelu2(motionblur_out, 0, 255);
-
-
-    // NOTE-IMP: Always call this before dumpOutput and computePSNRViolation
-    //hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GSM_calib.bin");
-    
-    //computePSNRViolation(result, golden_output, 30);
-    
-  }
-
-  stopProfiling();  
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet2_cifar10_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet2_cifar10_half_profiling.cc
deleted file mode 100644
index 82fe03247f36dbe6de31205a60344b7f44f85bad..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet2_cifar10_half_profiling.cc
+++ /dev/null
@@ -1,169 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  for (int i = 0; i < total_runs; i++){  
-	  for(int batch = 0; batch < batch_count; batch++){
-
-		int start = batch * batch_size;
-		int end = (batch + 1) * batch_size;
-		void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-
-      	profiler.resume_profiler();
-		
-		void* conv1out = tensorHalfConvolution(input, conv1_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv1out, conv1_bias); 
-		void* conv1_tanh = tensorHalfTanh(conv1out);
-		
-		// 2nd Layer
-		void* conv2out = tensorHalfConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv2out, conv2_bias); 
-		void* conv2_tanh = tensorHalfTanh(conv2out);
-		void* pool2out = tensorHalfPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-		 
-		// 3rd Layer
-		void* conv3out = tensorHalfConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv3out, conv3_bias); 
-		void* conv3_tanh = tensorHalfTanh(conv3out);
-
-		// 4th Layer
-		void* conv4out = tensorHalfConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv4out, conv4_bias); 
-		void* conv4_tanh = tensorHalfTanh(conv4out);
-		void* pool4out = tensorHalfPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-		
-		// 5th Layer
-		void* conv5out = tensorHalfConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv5out, conv5_bias); 
-		void* conv5_tanh = tensorHalfTanh(conv5out);
-
-		// 6th Layer
-		void* conv6out = tensorHalfConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-						   conv_mode, conv_precision);
-		tensorHalfAdd(conv6out, conv6_bias); 
-	  
-		void* conv6_tanh = tensorHalfTanh(conv6out);
-		void* pool6out = tensorHalfPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-		
-		// final FC Layer
-		void* gemm1out = tensorHalfGemmGPU(pool6out, fc1_weights);  
-		void* gemm1biasout = tensorHalfAdd(gemm1out, fc1_bias);
-		void* result = tensorSoftmax(gemm1biasout);
-
-		profiler.pause_profiler();
-		auto time_energy = profiler.get_time_energy();
-		total_time += time_energy.first;
-		total_energy += time_energy.second;
-
-        profiler.reset();
-
-		uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-		float accuracy = computeAccuracy2(labels, batch_size, result); 
-		final_accuracy += accuracy;
-		
-    	freeBatchMemory();
-    }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
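The profiling variants bracket only the forward pass with the Profiler: resume before the first tensor op, pause after the softmax, read the window's <time, energy> pair, and reset before the next batch, so file I/O and accuracy bookkeeping stay outside the measured window. The skeleton of that bracket, as used above; runNetwork and input are illustrative stand-ins for the inlined network body and its batch:

Profiler profiler;
profiler.start_profiler();
double total_time = 0.0, total_energy = 0.0;
for (int run = 0; run < total_runs; run++) {
  profiler.resume_profiler();                 // open the measurement window
  void* result = runNetwork(input);           // stand-in for the tensorHalf* body
  profiler.pause_profiler();                  // close the window
  auto time_energy = profiler.get_time_energy();
  total_time += time_energy.first;
  total_energy += time_energy.second;
  profiler.reset();                           // clear readings before the next window
}
profiler.stop_profiler();                     // done with all measurement windows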
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet_cifar10_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet_cifar10_half_profiling.cc
deleted file mode 100644
index 965e3170ea5c9df7dec1abe13d06581fe56f3b21..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/alexnet_cifar10_half_profiling.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/alexnet_cifar10_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  for (int i = 0; i < total_runs; i++){  
-      for(int batch = 0; batch < batch_count; batch++){
-
-        int start = batch * batch_size;
-        int end = (batch + 1) * batch_size;
-        void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);    
-
-        profiler.resume_profiler();
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-        void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-        void* var_2 = tensorHalfTanh(var_1); 
-        void* var_3 = tensorHalfPooling(var_2,0,2,2,0,0,2,2); 
-        void* var_5 = tensorHalfConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-        void* var_6 = tensorHalfAdd(var_5, conv2d_2_b); 
-        void* var_7 = tensorHalfTanh(var_6); 
-        void* var_8 = tensorHalfPooling(var_7,0,2,2,0,0,2,2); 
-        void* var_10 = tensorHalfConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_11 = tensorHalfAdd(var_10, conv2d_3_b); 
-        void* var_12 = tensorHalfTanh(var_11); 
-        void* var_13 = tensorHalfConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_14 = tensorHalfAdd(var_13, conv2d_4_b); 
-        void* var_15 = tensorHalfTanh(var_14); 
-        void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-        void* var_18 = tensorHalfTanh(var_17); 
-        void* var_19 = tensorHalfPooling(var_18,0,2,2,0,0,2,2); 
-        void* var_22 = tensorHalfGemmGPU(var_19, dense_1_w); 
-        void* var_23 = tensorHalfAdd(var_22, dense_1_b); 
-        void* var_24 = tensorSoftmax(var_23); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels,batch_size,var_24); 
-        final_accuracy += accuracy;
-        
-        freeBatchMemory();
-      }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
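A note on the accuracy arithmetic shared by these profiling harnesses: the loop adds one per-batch accuracy for each of the total_runs x batch_count iterations, so the two successive divisions are simply a mean over everything accumulated. With total_runs = 10 and batch_count = 5 as above, fifty values are summed and

// final_accuracy / batch_count / total_runs == final_accuracy / (5 * 10)
float mean_accuracy = final_accuracy / (batch_count * total_runs);

is the equivalent single division.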
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/lenet_keras_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/lenet_keras_half_profiling.cc
deleted file mode 100644
index e6ffd6b03de4901780511e56afdb5faac85bb807..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/lenet_keras_half_profiling.cc
+++ /dev/null
@@ -1,186 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 10;
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to batches of images - currently a single 5000-image batch
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable), input channels (MUST match input channels)
-  // IMP: The output channel counts match the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
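-  // 7*7*64 is the flattened conv2 output: two 2x2/stride-2 poolings take the
-  // 28x28 input to 14x14 and then 7x7, with 64 channels coming from conv2.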
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
- 
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  float final_accuracy = 0.0;
-
-  for(int i = 0; i < total_runs; i++){
-
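-    // OpenTuner handshake (reader side): block on the named pipe until the
-    // tuner writes a command, and abort the run if it sends "stop_run".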
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // ensure termination before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    profiler.resume_profiler();
-    startProfiling();
-
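-    // Two profilers bracket the same region: the power/energy Profiler above,
-    // and the runtime-side profiling whose output is dumped to profile.txt by
-    // stopProfiling() further down.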
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorHalfConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorHalfAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorHalfPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorHalfTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorHalfConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorHalfAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorHalfPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorHalfTanh(pool2out);
-
-    void* gemm1out = tensorHalfGemm(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorHalfAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorHalfTanh(gemm1biasout);
-  
-    void* gemm2out = tensorHalfGemm(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorHalfAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorHalfTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    profiler.pause_profiler();
-    auto time_energy = profiler.get_time_energy();
-    total_time += time_energy.first;
-    profiler.reset();
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    float accuracy = computeAccuracy2(labels, test_batch_size, result);
-    final_accuracy += accuracy;
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // write only the message, not 80 bytes
-      close(fd_out);
-    }
-    
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-}
-
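-// The loop above keeps only the time component of get_time_energy(); the other
-// profiling drivers in this diff also accumulate energy. A minimal sketch of a
-// helper that folds both, using the same Profiler calls made above (the helper
-// name and signature are illustrative, not part of the runtime):
-static void accumulate_window(Profiler& profiler, double& total_time, double& total_energy){
-  auto time_energy = profiler.get_time_energy();
-  total_time += time_energy.first;     // elapsed time for the last resume/pause window
-  total_energy += time_energy.second;  // energy for the same window
-  profiler.reset();                    // clear readings before the next window
-}
-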
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_depthwise_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_depthwise_half_profiling.cc
deleted file mode 100644
index 641047b50dc1219f1d02bbfb75e2014840c90d96..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_depthwise_half_profiling.cc
+++ /dev/null
@@ -1,416 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
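-  // Weights load as NCHW tensors: conv filters are (out, in, kh, kw) and the
-  // batch-norm gamma/beta/mean/variance are per-channel (1, C, 1, 1) vectors.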
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 1000;  
-  int batch_count = test_input_size / batch_size; 
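-  // 5000 test inputs / 1000 per batch = 5 batches per run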
-
-  int total_runs = 10;
-  float final_accuracy = 0.0; 
-
-  for (int run_num = 0; run_num < total_runs; run_num++){
-      for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
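-        // Each MobileNet stage is conv -> batch-norm -> ReLU. The depthwise
-        // stages go through tensorHalfConvCutlass, whose last argument equals
-        // the channel count here (32, 64, ...), which reads as the group count
-        // that makes these grouped (depthwise) convolutions.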
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-        void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-        void* var_2 = tensorHalfRelu(var_1); 
-        void* var_4 = tensorHalfConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-        void* var_5 = tensorHalfBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-        void* var_6 = tensorHalfRelu(var_5); 
-        void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-        void* var_8 = tensorHalfBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-        void* var_9 = tensorHalfRelu(var_8); 
-        void* var_11 = tensorHalfConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-        void* var_12 = tensorHalfBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-        void* var_13 = tensorHalfRelu(var_12); 
-        void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-        void* var_15 = tensorHalfBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-        void* var_16 = tensorHalfRelu(var_15); 
-        void* var_18 = tensorHalfConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-        void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-        void* var_20 = tensorHalfRelu(var_19); 
-        void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-        void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-        void* var_23 = tensorHalfRelu(var_22); 
-        void* var_26 = tensorHalfConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-        void* var_27 = tensorHalfBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-        void* var_28 = tensorHalfRelu(var_27); 
-        void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-        void* var_30 = tensorHalfBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-        void* var_31 = tensorHalfRelu(var_30); 
-        void* var_33 = tensorHalfConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-        void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-        void* var_35 = tensorHalfRelu(var_34); 
-        void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-        void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-        void* var_38 = tensorHalfRelu(var_37); 
-        void* var_41 = tensorHalfConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-        void* var_42 = tensorHalfBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-        void* var_43 = tensorHalfRelu(var_42); 
-        void* var_44 = tensorHalfConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-        void* var_45 = tensorHalfBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-        void* var_46 = tensorHalfRelu(var_45); 
-        void* var_48 = tensorHalfConvCutlass(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-        void* var_49 = tensorHalfBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-        void* var_50 = tensorHalfRelu(var_49); 
-        void* var_51 = tensorHalfConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-        void* var_52 = tensorHalfBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-        void* var_53 = tensorHalfRelu(var_52); 
-        void* var_55 = tensorHalfConvCutlass(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-        void* var_56 = tensorHalfBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-        void* var_57 = tensorHalfRelu(var_56); 
-        void* var_58 = tensorHalfConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-        void* var_59 = tensorHalfBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-        void* var_60 = tensorHalfRelu(var_59); 
-        void* var_63 = tensorHalfConvCutlass(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-        void* var_64 = tensorHalfBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-        void* var_65 = tensorHalfRelu(var_64); 
-        void* var_66 = tensorHalfConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-        void* var_67 = tensorHalfBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-        void* var_68 = tensorHalfRelu(var_67); 
-        void* var_70 = tensorHalfConvCutlass(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-        void* var_71 = tensorHalfBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-        void* var_72 = tensorHalfRelu(var_71); 
-        void* var_73 = tensorHalfConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-        void* var_74 = tensorHalfBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-        void* var_75 = tensorHalfRelu(var_74); 
-        void* var_77 = tensorHalfConvCutlass(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-        void* var_78 = tensorHalfBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-        void* var_79 = tensorHalfRelu(var_78); 
-        void* var_80 = tensorHalfConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-        void* var_81 = tensorHalfBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-        void* var_82 = tensorHalfRelu(var_81); 
-        void* var_85 = tensorHalfConvCutlass(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-        void* var_86 = tensorHalfBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-        void* var_87 = tensorHalfRelu(var_86); 
-        void* var_88 = tensorHalfConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-        void* var_89 = tensorHalfBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-        void* var_90 = tensorHalfRelu(var_89); 
-        void* var_92 = tensorHalfConvCutlass(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-        void* var_93 = tensorHalfBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-        void* var_94 = tensorHalfRelu(var_93); 
-        void* var_95 = tensorHalfConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-        void* var_96 = tensorHalfBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-        void* var_97 = tensorHalfRelu(var_96); 
-        void* var_99 = tensorHalfPooling(var_97,1,2,2,0,0,2,2); 
-        void* var_101 = tensorHalfGemmGPU(var_99, dense_1_w); 
-        void* var_102 = tensorHalfAdd(var_101, dense_1_b); 
-        void* var_103 = tensorSoftmax(var_102); 
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-      }
-  }
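-  // Normalize over every accumulated batch: total_runs runs x batch_count batches each.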
-  final_accuracy = final_accuracy / (batch_count * total_runs);
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_half_cifar10_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_half_cifar10_profiling.cc
deleted file mode 100644
index 1c6a3955b1ad644363947106bb0f77d6b9a77050..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_half_cifar10_profiling.cc
+++ /dev/null
@@ -1,438 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_quant/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
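-  // Assumed semantics: startMemTracking() records tensor allocations so freeBatchMemory() can release each batch's intermediates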
-  startMemTracking(); 
-
-  startProfiling();
-  
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  int total_runs = 10;
-
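-  // Energy/time profiler: the resume/pause calls below bracket only the forward pass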
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-
-  for(int j = 0; j < total_runs; j++){
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
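-    // Open the measurement window after batch I/O so only the GPU work is profiled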
-    profiler.resume_profiler();
-    void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorHalfRelu(var_1); 
-    void* var_4 = tensorHalfConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorHalfBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorHalfRelu(var_5); 
-    void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorHalfBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorHalfRelu(var_8); 
-    void* var_11 = tensorHalfConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorHalfBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorHalfRelu(var_12); 
-    void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorHalfBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorHalfRelu(var_15); 
-    void* var_18 = tensorHalfConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorHalfRelu(var_19); 
-    void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorHalfRelu(var_22); 
-    void* var_26 = tensorHalfConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorHalfBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorHalfRelu(var_27); 
-    void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorHalfBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorHalfRelu(var_30); 
-    void* var_33 = tensorHalfConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorHalfRelu(var_34); 
-    void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorHalfRelu(var_37); 
-    void* var_41 = tensorHalfConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_42 = tensorHalfBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_43 = tensorHalfRelu(var_42); 
-    void* var_44 = tensorHalfConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorHalfBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_46 = tensorHalfRelu(var_45); 
-    void* var_48 = tensorHalfConvolution(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_49 = tensorHalfBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_50 = tensorHalfRelu(var_49); 
-    void* var_51 = tensorHalfConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_52 = tensorHalfBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_53 = tensorHalfRelu(var_52); 
-    void* var_55 = tensorHalfConvolution(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_56 = tensorHalfBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_57 = tensorHalfRelu(var_56); 
-    void* var_58 = tensorHalfConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_59 = tensorHalfBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_60 = tensorHalfRelu(var_59); 
-    void* var_63 = tensorHalfConvolution(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_64 = tensorHalfBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_65 = tensorHalfRelu(var_64); 
-    void* var_66 = tensorHalfConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_67 = tensorHalfBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_68 = tensorHalfRelu(var_67); 
-    void* var_70 = tensorHalfConvolution(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_71 = tensorHalfBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_72 = tensorHalfRelu(var_71); 
-    void* var_73 = tensorHalfConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_74 = tensorHalfBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_75 = tensorHalfRelu(var_74); 
-    void* var_77 = tensorHalfConvolution(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_78 = tensorHalfBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_79 = tensorHalfRelu(var_78); 
-    void* var_80 = tensorHalfConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_81 = tensorHalfBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_82 = tensorHalfRelu(var_81); 
-    void* var_85 = tensorHalfConvolution(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_86 = tensorHalfBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_87 = tensorHalfRelu(var_86); 
-    void* var_88 = tensorHalfConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_89 = tensorHalfBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_90 = tensorHalfRelu(var_89); 
-    void* var_92 = tensorHalfConvolution(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_93 = tensorHalfBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_94 = tensorHalfRelu(var_93); 
-    void* var_95 = tensorHalfConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_96 = tensorHalfBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_97 = tensorHalfRelu(var_96); 
-    void* var_99 = tensorHalfPooling(var_97,1,2,2,0,0,2,2); 
-    void* var_101 = tensorHalfGemmGPU(var_99, dense_1_w); 
-    void* var_102 = tensorHalfAdd(var_101, dense_1_b); 
-    void* var_103 = tensorSoftmax(var_102); 
-
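-    // Close the measurement window and accumulate this batch's elapsed time (ms)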
-    profiler.pause_profiler();
-    auto time_energy = profiler.get_time_energy();
-    total_time += time_energy.first;
-    profiler.reset();
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-  }
-  profiler.stop_profiler();
-
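-  // total_time sums the per-batch windows, so dividing by total_runs gives the mean time per run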
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-  
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_depthwise_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_depthwise_half_profiling.cc
deleted file mode 100644
index f68eb1793b66b0579f2ed6dbff26a56677f2aa95..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_depthwise_half_profiling.cc
+++ /dev/null
@@ -1,249 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-
-int main(int argc, char* argv[]){ 
-
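-  // An optional first command-line argument overrides the default run count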
-  int total_runs = 10;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
-
-  //std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/");
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/");
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-
-
-  float final_accuracy = 0.0;
-
-  for(int j = 0; j < total_runs; j++){ 
-    float run_accuracy = 0.0; 
-    for(int i = 0; i < batch_count; i++){ 
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
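-      // Depthwise layers run through tensorHalfConvCutlass (grouped conv); the 1x1 pointwise layers use tensorHalfConvolution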
-      void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-      void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorHalfRelu(var_1); 
-      void* var_4 = tensorHalfConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_5 = tensorHalfBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_6 = tensorHalfRelu(var_5); 
-      void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-      void* var_8 = tensorHalfBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_9 = tensorHalfRelu(var_8); 
-      void* var_11 = tensorHalfConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_12 = tensorHalfBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_13 = tensorHalfRelu(var_12); 
-      void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-      void* var_15 = tensorHalfBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_16 = tensorHalfRelu(var_15); 
-      void* var_18 = tensorHalfConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_20 = tensorHalfRelu(var_19); 
-      void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-      void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_23 = tensorHalfRelu(var_22); 
-      void* var_26 = tensorHalfConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_27 = tensorHalfBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_28 = tensorHalfRelu(var_27); 
-      void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-      void* var_30 = tensorHalfBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_31 = tensorHalfRelu(var_30); 
-      void* var_33 = tensorHalfConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_35 = tensorHalfRelu(var_34); 
-      void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-      void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_38 = tensorHalfRelu(var_37); 
-      void* var_41 = tensorHalfConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_42 = tensorHalfBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_43 = tensorHalfRelu(var_42); 
-      void* var_44 = tensorHalfConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-      void* var_45 = tensorHalfBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_46 = tensorHalfRelu(var_45); 
-      void* var_47 = tensorHalfPooling(var_46,1,2,2,0,0,2,2); 
-      void* var_49 = tensorHalfGemmGPU(var_47, dense_1_w); 
-      void* var_50 = tensorHalfAdd(var_49, dense_1_b); 
-      void* var_51 = tensorSoftmax(var_50); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-      run_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    // Record this run's average accuracy; final_accuracy accumulates across runs
-    dumpFinalAccuracy(run_accuracy / batch_count); 
-    final_accuracy += run_accuracy; 
-  }
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy);
-
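-  // Write out the per-run accuracies recorded by the dumpFinalAccuracy calls above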
-  dumpExecutionAccuracies();
-    
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_half_profiling.cc
deleted file mode 100644
index c641db1a05efe44d4801da1ebdcaf2ae8945e7f2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/mobilenet_shallow_half_profiling.cc
+++ /dev/null
@@ -1,225 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-int main(){ 
-
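-  // Bind the tensor runtime to GPU 0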
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-
-  for(int run = 0; run < total_runs; run++){
-	  for(int i = 0; i < batch_count; i++){ 
-
-		int start = i * batch_size; 
-		int end = (i + 1) * batch_size; 
-
-		void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-
-		void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-		void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-		void* var_2 = tensorHalfRelu(var_1); 
-		void* var_4 = tensorHalfConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-		void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-		void* var_6 = tensorHalfRelu(var_5); 
-		void* var_7 = tensorHalfConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-		void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-		void* var_9 = tensorHalfRelu(var_8); 
-		void* var_11 = tensorHalfConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-		void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-		void* var_13 = tensorHalfRelu(var_12); 
-		void* var_14 = tensorHalfConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-		void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-		void* var_16 = tensorHalfRelu(var_15); 
-		void* var_18 = tensorHalfConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 2, 2, 1, 64); 
-		void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-		void* var_20 = tensorHalfRelu(var_19); 
-		void* var_21 = tensorHalfConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-		void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-		void* var_23 = tensorHalfRelu(var_22); 
-		void* var_26 = tensorHalfConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-		void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-		void* var_28 = tensorHalfRelu(var_27); 
-		void* var_29 = tensorHalfConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-		void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-		void* var_31 = tensorHalfRelu(var_30); 
-		void* var_33 = tensorHalfConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-		void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-		void* var_35 = tensorHalfRelu(var_34); 
-		void* var_36 = tensorHalfConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-		void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-		void* var_38 = tensorHalfRelu(var_37); 
-		void* var_40 = tensorHalfPooling(var_38,1,2,2,0,0,2,2); 
-		void* var_42 = tensorHalfGemmGPU(var_40, dense_1_w); 
-		void* var_43 = tensorHalfAdd(var_42, dense_1_b); 
-		void* var_44 = tensorSoftmax(var_43); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        profiler.reset();
-
-		uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-		float accuracy = computeAccuracy2(labels, batch_size, var_44); 
-		final_accuracy += accuracy; 
-		freeBatchMemory(); 
-	  } 
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
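
The measurement pattern above — resume the profiler just before the forward pass, pause right after it, accumulate the first element of the <milliseconds, energy> pair returned by get_time_energy(), then reset — recurs in every benchmark below. The following is a minimal, self-contained sketch of the same pattern, assuming only std::chrono in place of the Jetson power-rail profiler; run_batch and the loop constants are hypothetical placeholders, not runtime APIs:

    #include <chrono>
    #include <iostream>

    // Hypothetical stand-in for one batch of the tensorHalf* pipeline above.
    static void run_batch() { /* forward pass for one batch */ }

    int main() {
        using clock_type = std::chrono::high_resolution_clock;
        const int total_runs = 10, batch_count = 5;   // values from the benchmark
        double total_time = 0.0;                      // accumulated milliseconds
        for (int run = 0; run < total_runs; ++run) {
            for (int b = 0; b < batch_count; ++b) {
                auto t0 = clock_type::now();          // profiler.resume_profiler();
                run_batch();
                auto t1 = clock_type::now();          // profiler.pause_profiler();
                // stands in for profiler.get_time_energy().first, then reset()
                total_time += std::chrono::duration<double, std::milli>(t1 - t0).count();
            }
        }
        std::cout << "Average time per run (ms): " << total_time / total_runs << '\n';
        return 0;
    }

On hardware with the power rails mounted, the chrono calls would be replaced by the Profiler calls shown in the comments, which additionally report energy.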
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/resnet18_cifar10_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/resnet18_cifar10_half_profiling.cc
deleted file mode 100644
index f91814e8390a400159467298a3702147cbf2f4b3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/resnet18_cifar10_half_profiling.cc
+++ /dev/null
@@ -1,242 +0,0 @@
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10; // previously 100
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  Profiler profiler;
-  profiler.start_profiler();
-  double total_time = 0.0;
-
-  for (int itrs = 0; itrs < total_runs; itrs++){ 
-      for(int i = 0; i < batch_count; i++){
-
-        int start = i * batch_size;
-        int end = (i + 1) * batch_size;
-        
-        void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-
-        profiler.resume_profiler();
-        
-        void* var_2 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-        void* var_3 = tensorHalfAdd(var_2, conv2d_1_b); 
-        void* var_4 = tensorHalfRelu(var_3); 
-        void* var_6 = tensorHalfConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-        void* var_7 = tensorHalfAdd(var_6, conv2d_2_b); 
-        void* var_8 = tensorHalfRelu(var_7); 
-        void* var_10 = tensorHalfConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_11 = tensorHalfAdd(var_10, conv2d_3_b); 
-        void* var_12 = tensorHalfAdd(var_4, var_11); 
-        void* var_13 = tensorHalfRelu(var_12); 
-        void* var_15 = tensorHalfConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_16 = tensorHalfAdd(var_15, conv2d_4_b); 
-        void* var_17 = tensorHalfRelu(var_16); 
-        void* var_19 = tensorHalfConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_20 = tensorHalfAdd(var_19, conv2d_5_b); 
-        void* var_21 = tensorHalfAdd(var_13, var_20); 
-        void* var_22 = tensorHalfRelu(var_21); 
-        void* var_24 = tensorHalfConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-        void* var_25 = tensorHalfAdd(var_24, conv2d_6_b); 
-        void* var_26 = tensorHalfRelu(var_25); 
-        void* var_28 = tensorHalfConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-        void* var_29 = tensorHalfAdd(var_28, conv2d_7_b); 
-        void* var_30 = tensorHalfAdd(var_22, var_29); 
-        void* var_31 = tensorHalfRelu(var_30); 
-        void* var_33 = tensorHalfConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-        void* var_34 = tensorHalfAdd(var_33, conv2d_8_b); 
-        void* var_35 = tensorHalfRelu(var_34); 
-        void* var_37 = tensorHalfConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-        void* var_38 = tensorHalfAdd(var_37, conv2d_9_b); 
-        void* var_40 = tensorHalfConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-        void* var_41 = tensorHalfAdd(var_40, conv2d_10_b); 
-        void* var_42 = tensorHalfAdd(var_41, var_38); 
-        void* var_43 = tensorHalfRelu(var_42); 
-        void* var_45 = tensorHalfConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-        void* var_46 = tensorHalfAdd(var_45, conv2d_11_b); 
-        void* var_47 = tensorHalfRelu(var_46); 
-        void* var_49 = tensorHalfConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-        void* var_50 = tensorHalfAdd(var_49, conv2d_12_b); 
-        void* var_51 = tensorHalfAdd(var_43, var_50); 
-        void* var_52 = tensorHalfRelu(var_51); 
-        void* var_54 = tensorHalfConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-        void* var_55 = tensorHalfAdd(var_54, conv2d_13_b); 
-        void* var_56 = tensorHalfRelu(var_55); 
-        void* var_58 = tensorHalfConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-        void* var_59 = tensorHalfAdd(var_58, conv2d_14_b); 
-        void* var_60 = tensorHalfAdd(var_52, var_59); 
-        void* var_61 = tensorHalfRelu(var_60); 
-        void* var_63 = tensorHalfConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-        void* var_64 = tensorHalfAdd(var_63, conv2d_15_b); 
-        void* var_65 = tensorHalfRelu(var_64); 
-        void* var_67 = tensorHalfConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-        void* var_68 = tensorHalfAdd(var_67, conv2d_16_b); 
-        void* var_70 = tensorHalfConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-        void* var_71 = tensorHalfAdd(var_70, conv2d_17_b); 
-        void* var_72 = tensorHalfAdd(var_71, var_68); 
-        void* var_73 = tensorHalfRelu(var_72); 
-        void* var_75 = tensorHalfConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-        void* var_76 = tensorHalfAdd(var_75, conv2d_18_b); 
-        void* var_77 = tensorHalfRelu(var_76); 
-        void* var_79 = tensorHalfConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-        void* var_80 = tensorHalfAdd(var_79, conv2d_19_b); 
-        void* var_81 = tensorHalfAdd(var_73, var_80); 
-        void* var_82 = tensorHalfRelu(var_81); 
-        void* var_84 = tensorHalfConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-        void* var_85 = tensorHalfAdd(var_84, conv2d_20_b); 
-        void* var_86 = tensorHalfRelu(var_85); 
-        void* var_88 = tensorHalfConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-        void* var_89 = tensorHalfAdd(var_88, conv2d_21_b); 
-        void* var_90 = tensorHalfAdd(var_82, var_89); 
-        void* var_91 = tensorHalfRelu(var_90); 
-        void* var_92 = tensorHalfPooling(var_91,1,8,8,0,0,8,8); 
-        void* var_94 = tensorHalfGemmGPU(var_92, dense_1_w); 
-        void* var_95 = tensorHalfAdd(var_94, dense_1_b); 
-        void* var_96 = tensorSoftmax(var_95); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-        final_accuracy += accuracy;
-        
-        freeBatchMemory();
-      }
-  }
-  stopProfiling();
-
-  profiler.stop_profiler();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
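
Each iteration above reads the half-open window [start, end) of input rows; with test_input_size = 5000 and batch_size = 1000, the integer division yields batch_count = 5, and any remainder images would be silently skipped. A small sketch of that arithmetic, with the values copied from the benchmark:

    #include <iostream>

    int main() {
        const int test_input_size = 5000, batch_size = 1000;
        const int batch_count = test_input_size / batch_size;  // 5; remainder dropped
        for (int i = 0; i < batch_count; ++i) {
            int start = i * batch_size;       // first image index (inclusive)
            int end = (i + 1) * batch_size;   // one past the last image (exclusive)
            std::cout << "batch " << i << ": [" << start << ", " << end << ")\n";
        }
        return 0;
    }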
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc
deleted file mode 100644
index b778b1720c8a2db2f90230c3e57d0e0928f8665b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar100_half_profiling.cc
+++ /dev/null
@@ -1,182 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-  double total_time = 0.0;
-
-  for (int run = 0; run < total_runs; run++){
-	  for(int i = 0; i < batch_count; i++){ 
-
-		int start = i * batch_size; 
-		int end = (i + 1) * batch_size; 
-
-		void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-
-		void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-		void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-		void* var_2 = tensorHalfRelu(var_1); 
-		void* var_4 = tensorHalfConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-		void* var_5 = tensorHalfAdd(var_4, conv2d_2_b); 
-		void* var_6 = tensorHalfRelu(var_5); 
-		void* var_7 = tensorHalfPooling(var_6,0,2,2,0,0,2,2); 
-		void* var_8 = tensorHalfConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-		void* var_9 = tensorHalfAdd(var_8, conv2d_3_b); 
-		void* var_10 = tensorHalfRelu(var_9); 
-		void* var_12 = tensorHalfConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-		void* var_13 = tensorHalfAdd(var_12, conv2d_4_b); 
-		void* var_14 = tensorHalfRelu(var_13); 
-		void* var_15 = tensorHalfPooling(var_14,0,2,2,0,0,2,2); 
-		void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-		void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-		void* var_18 = tensorHalfRelu(var_17); 
-		void* var_20 = tensorHalfConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-		void* var_21 = tensorHalfAdd(var_20, conv2d_6_b); 
-		void* var_22 = tensorHalfRelu(var_21); 
-		void* var_24 = tensorHalfConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-		void* var_25 = tensorHalfAdd(var_24, conv2d_7_b); 
-		void* var_26 = tensorHalfRelu(var_25); 
-		void* var_27 = tensorHalfPooling(var_26,0,2,2,0,0,2,2); 
-		void* var_28 = tensorHalfConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-		void* var_29 = tensorHalfAdd(var_28, conv2d_8_b); 
-		void* var_30 = tensorHalfRelu(var_29); 
-		void* var_32 = tensorHalfConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-		void* var_33 = tensorHalfAdd(var_32, conv2d_9_b); 
-		void* var_34 = tensorHalfRelu(var_33); 
-		void* var_36 = tensorHalfConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-		void* var_37 = tensorHalfAdd(var_36, conv2d_10_b); 
-		void* var_38 = tensorHalfRelu(var_37); 
-		void* var_39 = tensorHalfPooling(var_38,0,2,2,0,0,2,2); 
-		void* var_40 = tensorHalfConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-		void* var_41 = tensorHalfAdd(var_40, conv2d_11_b); 
-		void* var_42 = tensorHalfRelu(var_41); 
-		void* var_44 = tensorHalfConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-		void* var_45 = tensorHalfAdd(var_44, conv2d_12_b); 
-		void* var_46 = tensorHalfRelu(var_45); 
-		void* var_48 = tensorHalfConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-		void* var_49 = tensorHalfAdd(var_48, conv2d_13_b); 
-		void* var_50 = tensorHalfRelu(var_49); 
-		void* var_51 = tensorHalfPooling(var_50,0,2,2,0,0,2,2); 
-		void* var_54 = tensorHalfGemmGPU(var_51, dense_1_w); 
-		void* var_55 = tensorHalfAdd(var_54, dense_1_b); 
-		void* var_56 = tensorHalfRelu(var_55); 
-		void* var_58 = tensorHalfGemmGPU(var_56, dense_2_w); 
-		void* var_59 = tensorHalfAdd(var_58, dense_2_b); 
-		void* var_60 = tensorSoftmax(var_59); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        profiler.reset();
-
-		uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-		float accuracy = computeAccuracy2(labels, batch_size, var_60, 100); 
-		final_accuracy += accuracy; 
-		freeBatchMemory(); 
-	 
-	  }
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
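
In each benchmark, final_accuracy accumulates one per-batch accuracy for every one of the total_runs * batch_count iterations, so the chained division by batch_count and then total_runs is simply the mean over all accumulated terms. A sketch with a made-up constant accuracy, showing that the two divisions recover the per-batch value:

    #include <iostream>

    int main() {
        const int total_runs = 10, batch_count = 5;
        float final_accuracy = 0.0f;
        for (int run = 0; run < total_runs; ++run)
            for (int b = 0; b < batch_count; ++b)
                final_accuracy += 84.0f;  // stand-in for computeAccuracy2(...)
        // Equivalent to dividing by the number of accumulated terms:
        final_accuracy = final_accuracy / batch_count / total_runs;  // 84.0
        std::cout << "final accuracy: " << final_accuracy << '\n';
        return 0;
    }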
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc
deleted file mode 100644
index 3f97e5dbde3b6d124888a8c74d435880097a394c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/profiling/vgg16_cifar10_half_profiling.cc
+++ /dev/null
@@ -1,189 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h>
-
-#include "../../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../../include/utils.h"
-
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  for (int itrs = 0; itrs < total_runs; itrs++){
-      for(int i = 0; i < batch_count; i++){
-
-        int start = i * batch_size;
-        int end = (i + 1) * batch_size;
-        
-        void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32); 
-    
-        profiler.resume_profiler();
- 
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-        void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-        void* var_2 = tensorHalfRelu(var_1); 
-        void* var_4 = tensorHalfConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-        void* var_5 = tensorHalfAdd(var_4, conv2d_2_b); 
-        void* var_6 = tensorHalfRelu(var_5); 
-        void* var_7 = tensorHalfPooling(var_6,0,2,2,0,0,2,2); 
-        void* var_8 = tensorHalfConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_9 = tensorHalfAdd(var_8, conv2d_3_b); 
-        void* var_10 = tensorHalfRelu(var_9); 
-        void* var_12 = tensorHalfConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_13 = tensorHalfAdd(var_12, conv2d_4_b); 
-        void* var_14 = tensorHalfRelu(var_13); 
-        void* var_15 = tensorHalfPooling(var_14,0,2,2,0,0,2,2); 
-        void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-        void* var_18 = tensorHalfRelu(var_17); 
-        void* var_20 = tensorHalfConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-        void* var_21 = tensorHalfAdd(var_20, conv2d_6_b); 
-        void* var_22 = tensorHalfRelu(var_21); 
-        void* var_24 = tensorHalfConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-        void* var_25 = tensorHalfAdd(var_24, conv2d_7_b); 
-        void* var_26 = tensorHalfRelu(var_25); 
-        void* var_27 = tensorHalfPooling(var_26,0,2,2,0,0,2,2); 
-        void* var_28 = tensorHalfConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-        void* var_29 = tensorHalfAdd(var_28, conv2d_8_b); 
-        void* var_30 = tensorHalfRelu(var_29); 
-        void* var_32 = tensorHalfConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-        void* var_33 = tensorHalfAdd(var_32, conv2d_9_b); 
-        void* var_34 = tensorHalfRelu(var_33); 
-        void* var_36 = tensorHalfConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-        void* var_37 = tensorHalfAdd(var_36, conv2d_10_b); 
-        void* var_38 = tensorHalfRelu(var_37); 
-        void* var_39 = tensorHalfPooling(var_38,0,2,2,0,0,2,2); 
-        void* var_40 = tensorHalfConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-        void* var_41 = tensorHalfAdd(var_40, conv2d_11_b); 
-        void* var_42 = tensorHalfRelu(var_41); 
-        void* var_44 = tensorHalfConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-        void* var_45 = tensorHalfAdd(var_44, conv2d_12_b); 
-        void* var_46 = tensorHalfRelu(var_45); 
-        void* var_48 = tensorHalfConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-        void* var_49 = tensorHalfAdd(var_48, conv2d_13_b); 
-        void* var_50 = tensorHalfRelu(var_49); 
-        void* var_51 = tensorHalfPooling(var_50,0,2,2,0,0,2,2); 
-        void* var_54 = tensorHalfGemmGPU(var_51, dense_1_w); 
-        void* var_55 = tensorHalfAdd(var_54, dense_1_b); 
-        void* var_56 = tensorHalfRelu(var_55); 
-        void* var_58 = tensorHalfGemmGPU(var_56, dense_2_w); 
-        void* var_59 = tensorHalfAdd(var_58, dense_2_b); 
-        void* var_60 = tensorSoftmax(var_59); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels,batch_size,var_60); 
-        final_accuracy += accuracy;
-
-        freeBatchMemory();
-      }
-  }
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  profiler.stop_profiler();
-  // NOTE: Stopping time profiling
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
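
These benchmarks interleave two profiling layers: the runtime's startProfiling()/stopProfiling() for per-operation timing, and the gpu_profiler's resume/pause window for energy. An unbalanced resume/pause silently skews the energy window, so one defensive option — a sketch only, not part of the runtime, with ProfilerLike standing in for the real Profiler interface — is an RAII guard that pauses automatically when the window goes out of scope:

    #include <iostream>

    // Stand-in for the gpu_profiler Profiler; only the two calls used here.
    struct ProfilerLike {
        void resume_profiler() { std::cout << "resume\n"; }
        void pause_profiler()  { std::cout << "pause\n";  }
    };

    // Pairs resume/pause with scope, so an early return cannot leave the
    // measurement window open.
    class ProfileWindow {
        ProfilerLike& p_;
    public:
        explicit ProfileWindow(ProfilerLike& p) : p_(p) { p_.resume_profiler(); }
        ~ProfileWindow() { p_.pause_profiler(); }
        ProfileWindow(const ProfileWindow&) = delete;
        ProfileWindow& operator=(const ProfileWindow&) = delete;
    };

    int main() {
        ProfilerLike profiler;
        {
            ProfileWindow w(profiler);  // resumes here
            // ... forward pass for one batch ...
        }                               // pauses here, even on early exit
        return 0;
    }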
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/resnet18_cifar10_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/resnet18_cifar10_half.cc
deleted file mode 100644
index fd1ef968591b61be66373e5b959605ce3dafb681..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/resnet18_cifar10_half.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 2000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* var_2 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_3 = tensorHalfAdd(var_2, conv2d_1_b); 
-    void* var_4 = tensorHalfRelu(var_3); 
-    void* var_6 = tensorHalfConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_7 = tensorHalfAdd(var_6, conv2d_2_b); 
-    void* var_8 = tensorHalfRelu(var_7); 
-    void* var_10 = tensorHalfConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_11 = tensorHalfAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorHalfAdd(var_4, var_11); 
-    void* var_13 = tensorHalfRelu(var_12); 
-    void* var_15 = tensorHalfConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_16 = tensorHalfAdd(var_15, conv2d_4_b); 
-    void* var_17 = tensorHalfRelu(var_16); 
-    void* var_19 = tensorHalfConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_20 = tensorHalfAdd(var_19, conv2d_5_b); 
-    void* var_21 = tensorHalfAdd(var_13, var_20); 
-    void* var_22 = tensorHalfRelu(var_21); 
-    void* var_24 = tensorHalfConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorHalfAdd(var_24, conv2d_6_b); 
-    void* var_26 = tensorHalfRelu(var_25); 
-    void* var_28 = tensorHalfConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorHalfAdd(var_28, conv2d_7_b); 
-    void* var_30 = tensorHalfAdd(var_22, var_29); 
-    void* var_31 = tensorHalfRelu(var_30); 
-    void* var_33 = tensorHalfConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-    void* var_34 = tensorHalfAdd(var_33, conv2d_8_b); 
-    void* var_35 = tensorHalfRelu(var_34); 
-    void* var_37 = tensorHalfConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_38 = tensorHalfAdd(var_37, conv2d_9_b); 
-    void* var_40 = tensorHalfConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-    void* var_41 = tensorHalfAdd(var_40, conv2d_10_b); 
-    void* var_42 = tensorHalfAdd(var_41, var_38); 
-    void* var_43 = tensorHalfRelu(var_42); 
-    void* var_45 = tensorHalfConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_46 = tensorHalfAdd(var_45, conv2d_11_b); 
-    void* var_47 = tensorHalfRelu(var_46); 
-    void* var_49 = tensorHalfConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_50 = tensorHalfAdd(var_49, conv2d_12_b); 
-    void* var_51 = tensorHalfAdd(var_43, var_50); 
-    void* var_52 = tensorHalfRelu(var_51); 
-    void* var_54 = tensorHalfConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_55 = tensorHalfAdd(var_54, conv2d_13_b); 
-    void* var_56 = tensorHalfRelu(var_55); 
-    void* var_58 = tensorHalfConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-    void* var_59 = tensorHalfAdd(var_58, conv2d_14_b); 
-    void* var_60 = tensorHalfAdd(var_52, var_59); 
-    void* var_61 = tensorHalfRelu(var_60); 
-    void* var_63 = tensorHalfConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-    void* var_64 = tensorHalfAdd(var_63, conv2d_15_b); 
-    void* var_65 = tensorHalfRelu(var_64); 
-    void* var_67 = tensorHalfConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-    void* var_68 = tensorHalfAdd(var_67, conv2d_16_b); 
-    void* var_70 = tensorHalfConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-    void* var_71 = tensorHalfAdd(var_70, conv2d_17_b); 
-    void* var_72 = tensorHalfAdd(var_71, var_68); 
-    void* var_73 = tensorHalfRelu(var_72); 
-    void* var_75 = tensorHalfConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-    void* var_76 = tensorHalfAdd(var_75, conv2d_18_b); 
-    void* var_77 = tensorHalfRelu(var_76); 
-    void* var_79 = tensorHalfConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-    void* var_80 = tensorHalfAdd(var_79, conv2d_19_b); 
-    void* var_81 = tensorHalfAdd(var_73, var_80); 
-    void* var_82 = tensorHalfRelu(var_81); 
-    void* var_84 = tensorHalfConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-    void* var_85 = tensorHalfAdd(var_84, conv2d_20_b); 
-    void* var_86 = tensorHalfRelu(var_85); 
-    void* var_88 = tensorHalfConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-    void* var_89 = tensorHalfAdd(var_88, conv2d_21_b); 
-    void* var_90 = tensorHalfAdd(var_82, var_89); 
-    void* var_91 = tensorHalfRelu(var_90); 
-    void* var_92 = tensorHalfPooling(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorHalfGemmGPU(var_92, dense_1_w); 
-    void* var_95 = tensorHalfAdd(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmax(var_95); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
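
The file above spells out every ResNet basic block as a conv/bias-add/ReLU pair plus a shortcut add. A minimal sketch of that pattern factored into a helper, reusing the tensorHalf* calls from the listing — the helper name residual_block is illustrative, and the reading of the trailing arguments as pad_h, pad_w, stride_h, stride_w, conv mode, compute precision follows the comments in the lenet sources later in this patch:

// Illustrative helper, not runtime API: one identity-shortcut ResNet block
// built from the same tensorHalf* calls as the listing above.
static void* residual_block(void* in, void* w1, void* b1, void* w2, void* b2) {
  void* c1 = tensorHalfConvolution(in, w1, 1, 1, 1, 1, 1, 0); // 3x3, stride 1
  void* r1 = tensorHalfRelu(tensorHalfAdd(c1, b1));
  void* c2 = tensorHalfConvolution(r1, w2, 1, 1, 1, 1, 1, 0);
  void* sum = tensorHalfAdd(in, tensorHalfAdd(c2, b2));       // identity shortcut
  return tensorHalfRelu(sum);
}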
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar100_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar100_half.cc
deleted file mode 100644
index f6a6d0ecbf6b158d48f0cbdd39f6c417476dcc3e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar100_half.cc
+++ /dev/null
@@ -1,160 +0,0 @@
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-int main(){ 
-
-    llvm_hpvm_initTensorRt(0); 
-
-    std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-    std::string input_path =  dir_prefix + std::string("input.bin"); 
-    std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-    std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-    void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-    std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-    void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-    std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-    void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-    std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-    void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-    std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-    void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-    std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-    void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-    void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-    std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-    void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-    std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-    void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-    std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-    void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-    void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-    std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-    void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-    void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-    std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-    void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-    std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-    void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-    std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-    void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-    void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-    std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-    void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-    void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-    std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-    void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-    void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-    std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-    void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-    void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-    std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-    void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-    std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-    void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-    std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-    void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-    std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-    void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-    std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-    void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-    std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-    void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-    std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-    void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-    startMemTracking(); 
-
-    int test_input_size = 2000; 
-    int batch_size = 1000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-        void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-        void* var_2 = tensorHalfRelu(var_1); 
-        void* var_4 = tensorHalfConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-        void* var_5 = tensorHalfAdd(var_4, conv2d_2_b); 
-        void* var_6 = tensorHalfRelu(var_5); 
-        void* var_7 = tensorHalfPooling(var_6,0,2,2,0,0,2,2); 
-        void* var_8 = tensorHalfConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_9 = tensorHalfAdd(var_8, conv2d_3_b); 
-        void* var_10 = tensorHalfRelu(var_9); 
-        void* var_12 = tensorHalfConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_13 = tensorHalfAdd(var_12, conv2d_4_b); 
-        void* var_14 = tensorHalfRelu(var_13); 
-        void* var_15 = tensorHalfPooling(var_14,0,2,2,0,0,2,2); 
-        void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-        void* var_18 = tensorHalfRelu(var_17); 
-        void* var_20 = tensorHalfConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-        void* var_21 = tensorHalfAdd(var_20, conv2d_6_b); 
-        void* var_22 = tensorHalfRelu(var_21); 
-        void* var_24 = tensorHalfConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-        void* var_25 = tensorHalfAdd(var_24, conv2d_7_b); 
-        void* var_26 = tensorHalfRelu(var_25); 
-        void* var_27 = tensorHalfPooling(var_26,0,2,2,0,0,2,2); 
-        void* var_28 = tensorHalfConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-        void* var_29 = tensorHalfAdd(var_28, conv2d_8_b); 
-        void* var_30 = tensorHalfRelu(var_29); 
-        void* var_32 = tensorHalfConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-        void* var_33 = tensorHalfAdd(var_32, conv2d_9_b); 
-        void* var_34 = tensorHalfRelu(var_33); 
-        void* var_36 = tensorHalfConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-        void* var_37 = tensorHalfAdd(var_36, conv2d_10_b); 
-        void* var_38 = tensorHalfRelu(var_37); 
-        void* var_39 = tensorHalfPooling(var_38,0,2,2,0,0,2,2); 
-        void* var_40 = tensorHalfConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-        void* var_41 = tensorHalfAdd(var_40, conv2d_11_b); 
-        void* var_42 = tensorHalfRelu(var_41); 
-        void* var_44 = tensorHalfConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-        void* var_45 = tensorHalfAdd(var_44, conv2d_12_b); 
-        void* var_46 = tensorHalfRelu(var_45); 
-        void* var_48 = tensorHalfConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-        void* var_49 = tensorHalfAdd(var_48, conv2d_13_b); 
-        void* var_50 = tensorHalfRelu(var_49); 
-        void* var_51 = tensorHalfPooling(var_50,0,2,2,0,0,2,2); 
-        void* var_54 = tensorHalfGemmGPU(var_51, dense_1_w); 
-        void* var_55 = tensorHalfAdd(var_54, dense_1_b); 
-        void* var_56 = tensorHalfRelu(var_55); 
-        void* var_58 = tensorHalfGemmGPU(var_56, dense_2_w); 
-        void* var_59 = tensorHalfAdd(var_58, dense_2_b); 
-        void* var_60 = tensorSoftmax(var_59); 
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_60, 100); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-
-    }
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy); 
-
-    llvm_hpvm_cleanupTensorRt(); 
-
-    return 0; 
-}
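
Every stage of the VGG-16 listing above is the same conv/bias/ReLU triple, with a 2x2 max pool closing each group. A hedged sketch of one stage as a helper (vgg_stage and its pool flag are illustrative, not runtime API):

// Illustrative only: one VGG-16 stage in terms of the calls used above.
static void* vgg_stage(void* in, void* w, void* b, bool pool) {
  void* c = tensorHalfConvolution(in, w, 1, 1, 1, 1, 1, 0);    // 3x3 'SAME' conv
  void* r = tensorHalfRelu(tensorHalfAdd(c, b));
  return pool ? tensorHalfPooling(r, 0, 2, 2, 0, 0, 2, 2) : r; // 2x2 max pool
}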
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar10_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar10_half.cc
deleted file mode 100644
index b296504a74c51de236da60e217f73925c805616f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/half/vgg16_cifar10_half.cc
+++ /dev/null
@@ -1,167 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h>
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 2000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // Start power and performance profiling 
-  startProfiling();
-
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32); 
- 
-    void* var_0 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorHalfAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorHalfRelu(var_1); 
-    void* var_4 = tensorHalfConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorHalfAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorHalfRelu(var_5); 
-    void* var_7 = tensorHalfPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorHalfConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorHalfAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorHalfRelu(var_9); 
-    void* var_12 = tensorHalfConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorHalfAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorHalfRelu(var_13); 
-    void* var_15 = tensorHalfPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorHalfConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorHalfAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorHalfRelu(var_17); 
-    void* var_20 = tensorHalfConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorHalfAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorHalfRelu(var_21); 
-    void* var_24 = tensorHalfConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorHalfAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorHalfRelu(var_25); 
-    void* var_27 = tensorHalfPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorHalfConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorHalfAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorHalfRelu(var_29); 
-    void* var_32 = tensorHalfConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorHalfAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorHalfRelu(var_33); 
-    void* var_36 = tensorHalfConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorHalfAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorHalfRelu(var_37); 
-    void* var_39 = tensorHalfPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorHalfConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorHalfAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorHalfRelu(var_41); 
-    void* var_44 = tensorHalfConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorHalfAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorHalfRelu(var_45); 
-    void* var_48 = tensorHalfConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorHalfAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorHalfRelu(var_49); 
-    void* var_51 = tensorHalfPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorHalfGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorHalfAdd(var_54, dense_1_b); 
-    void* var_56 = tensorHalfRelu(var_55); 
-    void* var_58 = tensorHalfGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorHalfAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_60); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  // Start power and performance profiling 
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
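
This file differs from the CIFAR-100 variant above only in the final dense layer (10 classes instead of 100) and the added profiling calls. Both slice the test set with the same integer arithmetic, which silently drops any partial final batch; a small sketch of that arithmetic with a hypothetical guard the originals do not have:

// Batch indexing used by all of these sources; the assert is a hypothetical
// addition - the originals just drop a partial final batch.
static void batch_bounds(int test_input_size, int batch_size) {
  int batch_count = test_input_size / batch_size; // integer division
  // assert(test_input_size % batch_size == 0);   // hypothetical guard
  for (int i = 0; i < batch_count; i++) {
    int start = i * batch_size;                   // first image of batch i
    int end = (i + 1) * batch_size;               // one past the last image
    (void)start; (void)end; // passed to readInputBatch(..., start, end, 3, 32, 32)
  }
}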
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/layers/cifar10_layers.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/layers/cifar10_layers.cc
deleted file mode 100644
index 4d083f58ba0d9db4ac2f1794be97b5409c7e1508..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/layers/cifar10_layers.cc
+++ /dev/null
@@ -1,157 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 2;
-  if(Opentuner_run){
-    total_runs = 100000;
-  }
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to batch of images - currently 5 images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/cifar10/labels.bin", test_batch_size);
-
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/cifar10/input.bin",
-				     float_type,
-				     test_batch_size, 3, 32, 32);
-   
-    void* conv1_filter = readTrainedWeights("../model_params/cifar10/conv1.bin",
-					    float_type, 32, 3, 3, 3);  
-    void* conv1_bias = readTrainedWeights("../model_params/cifar10/conv1_bias.bin",
-					  float_type, 1, 32, 1, 1);  
-    void* conv2_filter = readTrainedWeights("../model_params/cifar10/conv2.bin",
-					    float_type, 64, 32, 3, 3);  
-    void* conv2_bias = readTrainedWeights("../model_params/cifar10/conv2_bias.bin",
-					  float_type, 1, 64, 1, 1);
-    void* conv3_filter = readTrainedWeights("../model_params/cifar10/conv3.bin",
-					    float_type, 128, 64, 3, 3);  
-    void* conv3_bias = readTrainedWeights("../model_params/cifar10/conv3_bias.bin",
-					  float_type, 1, 128, 1, 1);  
-    void* conv4_filter = readTrainedWeights("../model_params/cifar10/conv4.bin",
-					    float_type, 128, 128, 3, 3);  
-    void* conv4_bias = readTrainedWeights("../model_params/cifar10/conv4_bias.bin",
-					  float_type, 1, 128, 1, 1);
-
-  
-    void* fc1_weights = readTrainedWeights("../model_params/cifar10/fc1.bin",
-					   float_type, 1, 1, 2048, 1024);  
-    void* fc1_bias = readTrainedWeights("../model_params/cifar10/fc1_bias.bin",
-					float_type, 1, 1024, 1, 1);  
-    void* fc2_weights = readTrainedWeights("../model_params/cifar10/fc2.bin",
-					   float_type, 1, 1, 1024, 10);  
-    void* fc2_bias = readTrainedWeights("../model_params/cifar10/fc2_bias.bin",
-					float_type, 1, 10, 1, 1);  
-
-  
-    clearTensorMap();
-  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-
-
-    
-    void* conv1_out = ConvLayer_GPU(input, conv1_filter, conv1_bias,
-				    1, 1, 1, 1, 0, 0, 0, -1,1);
-
-    void* conv2_out = ConvLayer_GPU(conv1_out, conv2_filter, conv2_bias, 
-				    1, 1, 1, 1, 0, 2, 0, -1,1);
-    
-    void* conv3_out = ConvLayer_GPU(conv2_out, conv3_filter, conv3_bias, 
-				    1, 1, 1, 1, 0, 2, 0, -1,1);
-
-    void* conv4_out = ConvLayer_GPU(conv3_out, conv4_filter, conv4_bias, 
-				    1, 1, 1, 1, 0, 2, 0, -1,1);
-
-    void* fc1_out = FCLayer_GPU(conv4_out, fc1_weights, fc1_bias, 0, -1,1);
-    
-    void* fc2_out = FCLayer_GPU(fc1_out, fc2_weights, fc2_bias, 0, -1,1);
-
-    void* result = tensorSoftmax(fc2_out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message, not 80 stray bytes
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
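
The OpenTuner blocks in this and the following sources handshake over the named pipe /tmp/myfifo. A standalone sketch of the read side with the buffer explicitly null-terminated — POSIX calls only; the pipe path and the "stop_run" message come from the listing, the helper name is hypothetical:

#include <fcntl.h>
#include <string.h>
#include <unistd.h>

// Returns true when the tuner asks the run loop to stop.
static bool tuner_says_stop(const char* fifo_path) {
  int fd = open(fifo_path, O_RDONLY);
  if (fd < 0) return true;                  // treat a missing pipe as "stop"
  char buf[100];
  ssize_t n = read(fd, buf, sizeof(buf) - 1);
  close(fd);
  if (n <= 0) return true;
  buf[n] = '\0';                            // read() does not null-terminate
  return strcmp(buf, "stop_run") == 0;
}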
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet2_tanh.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet2_tanh.cc
deleted file mode 100644
index d2d663552fdab6366f28655ca835ba63cb4fcee4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet2_tanh.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // NOTE: evaluates a single batch of test_batch_size images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable), input channels (MUST match input channels)
-  // IMP: The output channel count matches the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_tanh2/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_tanh2/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_tanh2/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_tanh2/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_tanh2/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_tanh2/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_tanh2/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_tanh2/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorTanh(pool2out);
-
-    void* gemm1out = tensorGemmGPU(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorTanh(gemm1biasout);
-  
-    void* gemm2out = tensorGemmGPU(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message, not 80 stray bytes
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
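
The 'SAME' convolutions above pass pad 2 for the 5x5 filters because, at stride 1, the output matches the input size when pad = (k - 1) / 2. A compile-time check of that rule:

// For an odd kernel k at stride 1, 'SAME' padding is (k - 1) / 2.
constexpr int same_pad(int k) { return (k - 1) / 2; }
static_assert(same_pad(5) == 2, "5x5 'SAME' conv uses pad 2");
static_assert(same_pad(3) == 1, "3x3 'SAME' conv uses pad 1");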
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_front.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_front.cc
deleted file mode 100644
index effb293a8b63119015ed8dbf5f8938eb62f2f89c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_front.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/lenet_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  void* input = readTrainedWeights(input_path.c_str(), 0,10000,1,28,28); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias2.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc5.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias5.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("fc6.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("fc_bias6.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 0); 
-  void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-  void* var_2 = tensorTanh(var_1); 
-  void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-  void* var_4 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-  void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-  void* var_6 = tensorTanh(var_5); 
-  void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-  void* var_9 = tensorGemmGPU(var_7, dense_1_w); 
-  void* var_10 = tensorAdd(var_9, dense_1_b); 
-  void* var_11 = tensorTanh(var_10); 
-  void* var_12 = tensorGemmGPU(var_11, dense_2_w); 
-  void* var_13 = tensorAdd(var_12, dense_2_b); 
-  void* var_14 = tensorTanh(var_13); 
-  void* var_15 = tensorSoftmax(var_14); 
-
-  computeAccuracy2(labels, 10000, var_15);
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
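
The 3136-wide dense_1_w above follows from the shapes: 28x28 inputs, two 2x2/stride-2 pools (28 -> 14 -> 7), and 64 channels out of the last conv, so the flattened feature is 7*7*64. As arithmetic:

// Why dense_1_w is 3136 wide in the LeNet sources:
static_assert(28 / 2 / 2 == 7, "two 2x2 stride-2 pools halve 28 twice");
static_assert(7 * 7 * 64 == 3136, "flattened width fed into dense_1_w");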
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_int32.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_int32.cc
deleted file mode 100644
index 03a4137004fe063a4536efec8fa7ecf2d8d2b374..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_int32.cc
+++ /dev/null
@@ -1,91 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../../keras/data/lenet_test_8/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 10000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 1); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-    void* var_4 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 1); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_8 = tensorConvolution(var_6, conv2d_3_w, 1, 1, 2, 2, 1, 1); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorGemmGPU(var_10, dense_1_w); 
-    void* var_13 = tensorAdd(var_12, dense_1_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorGemmGPU(var_14, dense_2_w); 
-    void* var_16 = tensorAdd(var_15, dense_2_b); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_18 = tensorSoftmax(var_17); 
-
-    uint32_t* labels = readLabelsBatch3(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy3(labels, var_18); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
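
This variant is the only one that reads 32-bit labels (readLabelsBatch3 / computeAccuracy3); the others use 8-bit labels. A hedged sketch of top-1 accuracy over a row-major [batch, classes] float output — the layout is an assumption, and top1_accuracy is a hypothetical helper, not runtime API:

#include <cstddef>
#include <cstdint>

// Assumes row-major [batch][classes] float scores; illustrative only.
static float top1_accuracy(const uint32_t* labels, const float* out,
                           int batch, int classes) {
  int correct = 0;
  for (int i = 0; i < batch; i++) {
    const float* row = out + (size_t)i * classes;
    int best = 0;
    for (int c = 1; c < classes; c++)
      if (row[c] > row[best]) best = c;    // argmax over class scores
    if ((uint32_t)best == labels[i]) correct++;
  }
  return 100.0f * correct / batch;
}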
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_keras.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_keras.cc
deleted file mode 100644
index edf36410761c7ae19da50067296d4c47b203504a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_keras.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // NOTE: evaluates a single batch of test_batch_size images
-
-  int test_batch_size = 10000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable), input channels (MUST match input channels)
-  // IMP: The output channel count matches the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorTanh(pool2out);
-
-    void* gemm1out = tensorGemmGPU(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorTanh(gemm1biasout);
-  
-    void* gemm2out = tensorGemmGPU(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message, not 80 stray bytes
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
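
The note above that tensorAdd only requires the channel dimension to match describes a broadcast: the 1xCx1x1 bias is added in place across the batch and spatial dimensions. A sketch of those semantics over a plain NCHW float buffer (this shows the meaning of the op, not the runtime's implementation):

#include <cstddef>

// Per-channel bias add on an NCHW float tensor (semantics sketch).
static void bias_add_nchw(float* x, const float* bias,
                          int n, int c, int h, int w) {
  for (int i = 0; i < n; i++)
    for (int j = 0; j < c; j++)
      for (int k = 0; k < h * w; k++)
        x[((size_t)i * c + j) * h * w + k] += bias[j]; // one bias per channel
}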
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_layers.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_layers.cc
deleted file mode 100644
index a6b777e36c1b31440a3ad7d227df4915b1cc27df..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_layers.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-
-int main(){ 
-
-  llvm_hpvm_initializeRuntimeController("tuner_confs.txt", "quant_ranges_rt.txt");
-  llvm_hpvm_initApproxhpvmRt(0);
-  
-
-  std::string dir_prefix = std::string("../model_params/lenet_relu/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  void* input = readTrainedWeights(input_path.c_str(), 0,10000,1,28,28); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  void* var_0 = wrapper_ConvLayer("1", input, conv2d_1_w, conv2d_1_b, 2, 2, 1, 1, 0, 2, 1, 0, 0); 
-  void* var_1 = wrapper_ConvLayer("2", var_0, conv2d_2_w, conv2d_2_b, 2, 2, 1, 1, -1, 0, 1, 0, 0); 
-  void* var_2 = wrapper_ConvLayer("3", var_1, conv2d_3_w, conv2d_3_b, 1, 1, 2, 2, -1, 0, 1, 0, 0); 
-  void* var_3 = wrapper_FCLayer("4", var_2, dense_1_w, dense_1_b, 1, 0, 0); 
-  void* var_4 = wrapper_FCLayer("5", var_3, dense_2_w, dense_2_b, 1, 0, 0); 
-  void* var_5 = tensorSoftmax(var_4); 
-
-  computeAccuracy2(labels,10000,var_5); 
-
-  
-  llvm_hpvm_cleanupApproxhpvmRt(); 
-  llvm_hpvm_clearRuntimeController();
-
-  
-  return 0; 
-}
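
Unlike the per-op sources, this file drives fused layer calls whose string ids ("1" through "5") key the per-layer settings in tuner_confs.txt and quant_ranges_rt.txt. A hypothetical unfused reading of the first call, using the per-op API from the other listings — the mapping of wrapper_ConvLayer's integer arguments to padding, activation, and pooling is an assumption (ReLU is guessed from the lenet_relu parameter directory):

// Hypothetical unfused equivalent of wrapper_ConvLayer("1", ...) above.
static void* layer1_unfused(void* input, void* w, void* b) {
  void* c = tensorConvolution(input, w, 2, 2, 1, 1, 1, 0); // 5x5 'SAME' conv
  void* r = tensorRelu(tensorAdd(c, b));                   // assumed activation
  return tensorPooling(r, 0, 2, 2, 0, 0, 2, 2);            // assumed 2x2 max pool
}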
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_perf.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_perf.cc
deleted file mode 100644
index 7c9583f291ea908c4c89a8b56045e06585a4f83a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/lenet_perf.cc
+++ /dev/null
@@ -1,185 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-int total_runs = 1;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // NOTE: evaluates a single batch of test_batch_size images
-
-  int test_batch_size = 1000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable), input channels (MUST match input channels)
-  // IMP: The output channel count matches the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    //void* conv1out = tensorConvPerfCuda(input, conv1_filter, 2, 2, 1, 1,
-    //				conv_mode, conv_precision, 2, 2, 1);
-
-    void* conv1out = tensorConvSampSim(input, conv1_filter, 2, 2, 1, 1,
-    				       conv_mode, conv_precision, 4, 0);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    //void* conv2out = tensorConvPerfCuda(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-    //				conv_mode, conv_precision, 1, 2, 1);
-
-    void* conv2out = tensorConvSampSim(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision, 2, 0);
-    
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorTanh(pool2out);
-
-    void* gemm1out = tensorGemmGPU(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorTanh(gemm1biasout);
-  
-    void* gemm2out = tensorGemmGPU(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    float accuracy = computeAccuracy2(labels, test_batch_size, result);
-    dumpFinalAccuracy(accuracy); 
-
-    
-    // FIXME: uncomment dumpAccuracyNorms() below to use the piped autotuner
-    //dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message, not 80 stray bytes
-      close(fd_out);
-    }
-    
-  }
-
-  dumpExecutionAccuracies();
-
-  
-}
-
-
-
-int main(int argc, char* argv[]){
-
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
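
This source differs from lenet_keras.cc in two ways: the convolutions are swapped for sampled variants (tensorConvSampSim, with the perforated tensorConvPerfCuda calls left commented out), and the run count comes from argv rather than an OpenTuner loop, so repeated profiled runs can be averaged. A minimal sketch of that argument handling:

#include <cstdlib>

// Run-count plumbing as in main() above: ./lenet_perf [total_runs]
static int parse_total_runs(int argc, char* argv[]) {
  return (argc > 1) ? std::atoi(argv[1]) : 1; // defaults to a single run
}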
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet.cc
deleted file mode 100644
index ba7af9846916057fedc05757bdad77fefb01590e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet.cc
+++ /dev/null
@@ -1,413 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_hpvm_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
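-  // Shape convention used by the readTrainedWeights calls above (NCHW):
-  // regular conv filters are (out_channels, in_channels, kH, kW), depthwise
-  // filters are (channels, 1, 3, 3), and each batch-norm parameter is a
-  // per-channel vector stored as (1, C, 1, 1).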
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 3000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
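-  // batch_count = 3000 / 1000 = 3 full batches; the integer division would
-  // silently drop a trailing partial batch if test_input_size were not an
-  // exact multiple of batch_size.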
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
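-    // The trailing argument to tensorConvolution is assumed to be the group
-    // count: setting it to the channel count (32 here) makes this a depthwise
-    // convolution, matching the (32,1,3,3) filter loaded above.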
-    void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_5 = tensorRelu(var_4); 
-    void* var_6 = tensorConvolution(var_5, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_11 = tensorRelu(var_10); 
-    void* var_12 = tensorConvolution(var_11, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_18 = tensorConvolution(var_17, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_22 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_23 = tensorBatchNorm(var_22, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_24 = tensorRelu(var_23); 
-    void* var_25 = tensorConvolution(var_24, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_26 = tensorBatchNorm(var_25, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_27 = tensorRelu(var_26); 
-    void* var_28 = tensorConvolution(var_27, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_29 = tensorBatchNorm(var_28, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_31 = tensorConvolution(var_30, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_32 = tensorBatchNorm(var_31, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_33 = tensorRelu(var_32); 
-    void* var_35 = tensorConvolution(var_33, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_36 = tensorBatchNorm(var_35, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_37 = tensorRelu(var_36); 
-    void* var_38 = tensorConvolution(var_37, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_39 = tensorBatchNorm(var_38, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_40 = tensorRelu(var_39); 
-    void* var_41 = tensorConvolution(var_40, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_47 = tensorConvolution(var_46, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_48 = tensorBatchNorm(var_47, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_49 = tensorRelu(var_48); 
-    void* var_50 = tensorConvolution(var_49, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_51 = tensorBatchNorm(var_50, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvolution(var_52, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_55 = tensorBatchNorm(var_54, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_57 = tensorConvolution(var_56, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_58 = tensorBatchNorm(var_57, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_59 = tensorRelu(var_58); 
-    void* var_60 = tensorConvolution(var_59, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_61 = tensorBatchNorm(var_60, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_62 = tensorRelu(var_61); 
-    void* var_63 = tensorConvolution(var_62, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_64 = tensorBatchNorm(var_63, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_66 = tensorConvolution(var_65, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_69 = tensorConvolution(var_68, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_70 = tensorBatchNorm(var_69, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_71 = tensorRelu(var_70); 
-    void* var_73 = tensorConvolution(var_71, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_75 = tensorRelu(var_74); 
-    void* var_76 = tensorConvolution(var_75, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_77 = tensorBatchNorm(var_76, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_78 = tensorRelu(var_77); 
-    void* var_79 = tensorConvolution(var_78, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_80 = tensorBatchNorm(var_79, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_81 = tensorRelu(var_80); 
-    void* var_82 = tensorConvolution(var_81, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_83 = tensorBatchNorm(var_82, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_84 = tensorRelu(var_83); 
-    void* var_86 = tensorPooling(var_84,1,2,2,0,0,2,2); 
-    void* var_88 = tensorGemmGPU(var_86, dense_1_w); 
-    void* var_89 = tensorAdd(var_88, dense_1_b); 
-    void* var_90 = tensorSoftmax(var_89); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_90); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
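-  // Averaging per-batch accuracies equals the whole-set accuracy only because
-  // every batch has the same size (1000); unequal batches would need a
-  // size-weighted average.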
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10.cc
deleted file mode 100644
index 80a55dfd6b138ffa23b1ac9a7148025a491948ed..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10.cc
+++ /dev/null
@@ -1,413 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_quant/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorRelu(var_8); 
-    void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorRelu(var_15); 
-    void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorRelu(var_22); 
-    void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorRelu(var_27); 
-    void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_53 = tensorRelu(var_52); 
-    void* var_55 = tensorConvolution(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_57 = tensorRelu(var_56); 
-    void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_60 = tensorRelu(var_59); 
-    void* var_63 = tensorConvolution(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_70 = tensorConvolution(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_72 = tensorRelu(var_71); 
-    void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_75 = tensorRelu(var_74); 
-    void* var_77 = tensorConvolution(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_79 = tensorRelu(var_78); 
-    void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_85 = tensorConvolution(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_87 = tensorRelu(var_86); 
-    void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_90 = tensorRelu(var_89); 
-    void* var_92 = tensorConvolution(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_94 = tensorRelu(var_93); 
-    void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_97 = tensorRelu(var_96); 
-    void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-    void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-    void* var_102 = tensorAdd(var_101, dense_1_b); 
-    void* var_103 = tensorSoftmax(var_102); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
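
Note on the generated sources deleted above and below: every parameter load follows a single two-line pattern — build "<dir_prefix><name>.bin", then call readTrainedWeights with a constant 0 followed by the tensor's NCHW dimensions. A minimal sketch condensing that pattern is shown here; loadParam is a hypothetical helper name, and the readTrainedWeights signature is inferred from the call sites visible in this diff, not taken from the runtime headers:

    #include <string>

    // Assumed declaration, inferred from call sites such as
    //   readTrainedWeights(conv2d_8_w_path.c_str(), 0, 512, 512, 1, 1);
    // The second argument is always 0 in these generated files.
    extern void* readTrainedWeights(const char* path, int flag,
                                    int n, int c, int h, int w);

    // Hypothetical helper: one call per parameter tensor instead of the
    // repeated path-string + readTrainedWeights pair expanded above.
    static void* loadParam(const std::string& dir_prefix,
                           const std::string& name,
                           int n, int c, int h, int w) {
      std::string path = dir_prefix + name + ".bin";
      return readTrainedWeights(path.c_str(), 0, n, c, h, w);
    }

    // Example, equivalent to the expanded lines in the deleted file:
    //   void* conv2d_8_w = loadParam(dir_prefix, "conv2d_8_w", 512, 512, 1, 1);
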
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10_shallow.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10_shallow.cc
deleted file mode 100644
index 89aa451dc1a5ee7b532bd5375e2e71e520c1372e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_cifar10_shallow.cc
+++ /dev/null
@@ -1,242 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 1;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
-
-  //std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/");
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_cifar10_shallow/"); 
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 1000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-
-
-  for(int j = 0; j < total_runs; j++){    
-    float final_accuracy = 0.0;    
-    for(int i = 0; i < batch_count; i++){ 
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_6 = tensorRelu(var_5); 
-      void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-      void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_9 = tensorRelu(var_8); 
-      void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_13 = tensorRelu(var_12); 
-      void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-      void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_16 = tensorRelu(var_15); 
-      void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_28 = tensorRelu(var_27); 
-      void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-      void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_31 = tensorRelu(var_30); 
-      void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_43 = tensorRelu(var_42); 
-      void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-      void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_46 = tensorRelu(var_45); 
-      void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-      void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-      void* var_50 = tensorAdd(var_49, dense_1_b); 
-      void* var_51 = tensorSoftmax(var_50); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-  }
-
-  dumpExecutionAccuracies();
-    
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
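
The driver structure is the same in each deleted benchmark: start memory tracking, run the network batch by batch, average the per-batch accuracies, and (in the shallow variant above) repeat for total_runs executions before dumping all accuracies. Below is a compact sketch of that skeleton using only runtime calls that appear in this diff; runForwardPass is a hypothetical stand-in for the expanded conv / batch-norm / ReLU / pooling / GEMM / softmax sequence, and the extern signatures are inferred from the call sites rather than from tensor_runtime.h or utils.h:

    #include <cstdint>
    #include <string>

    // Declarations inferred from call sites in this diff; in the deleted
    // sources these come from tensor_runtime.h and utils.h.
    extern void     startMemTracking();
    extern void*    readInputBatch(const char* path, int flag, int start,
                                   int end, int c, int h, int w);
    extern uint8_t* readLabelsBatch(const char* path, int start, int end);
    extern float    computeAccuracy2(uint8_t* labels, int batch_size,
                                     void* output);
    extern void     freeBatchMemory();

    // Hypothetical stand-in for the layer-by-layer tensor calls expanded
    // in the deleted files (tensorConvolution, tensorBatchNorm, tensorRelu,
    // tensorPooling, tensorGemmGPU, tensorAdd, tensorSoftmax).
    void* runForwardPass(void* input);

    float evaluate(const std::string& input_path,
                   const std::string& labels_path,
                   int test_input_size, int batch_size) {
      startMemTracking();  // track tensors allocated during each batch
      int batch_count = test_input_size / batch_size;
      float final_accuracy = 0.0f;
      for (int i = 0; i < batch_count; i++) {
        int start = i * batch_size;
        int end = (i + 1) * batch_size;
        // CIFAR-10 inputs are NCHW 3x32x32; read one batch slice at a time.
        void* input = readInputBatch(input_path.c_str(), 0, start, end,
                                     3, 32, 32);
        void* output = runForwardPass(input);
        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end);
        final_accuracy += computeAccuracy2(labels, batch_size, output);
        freeBatchMemory();  // release the tensors allocated this batch
      }
      // Caller passes this average to dumpFinalAccuracy(), and the
      // multi-run variant then calls dumpExecutionAccuracies().
      return final_accuracy / batch_count;
    }
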
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_custom_depthwise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_custom_depthwise.cc
deleted file mode 100644
index e30844480947b89e5572c1687f66c445652d5536..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_custom_depthwise.cc
+++ /dev/null
@@ -1,414 +0,0 @@
-
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../hpvm-tensor-rt/model_params/mobilenet_quant/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
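-  // Track per-batch tensor allocations so freeBatchMemory() below can release
-  // them at the end of each iteration.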
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 100;  
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvCutlass(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvCutlass(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorRelu(var_8); 
-    void* var_11 = tensorConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorConvCutlass(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorRelu(var_15); 
-    void* var_18 = tensorConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_21 = tensorConvCutlass(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorRelu(var_22); 
-    void* var_26 = tensorConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorRelu(var_27); 
-    void* var_29 = tensorConvCutlass(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_36 = tensorConvCutlass(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_41 = tensorConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvCutlass(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvCutlass(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorConvCutlass(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_53 = tensorRelu(var_52); 
-    void* var_55 = tensorConvCutlass(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_57 = tensorRelu(var_56); 
-    void* var_58 = tensorConvCutlass(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_60 = tensorRelu(var_59); 
-    void* var_63 = tensorConvCutlass(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_66 = tensorConvCutlass(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_70 = tensorConvCutlass(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_72 = tensorRelu(var_71); 
-    void* var_73 = tensorConvCutlass(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_75 = tensorRelu(var_74); 
-    void* var_77 = tensorConvCutlass(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_79 = tensorRelu(var_78); 
-    void* var_80 = tensorConvCutlass(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_85 = tensorConvCutlass(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_87 = tensorRelu(var_86); 
-    void* var_88 = tensorConvCutlass(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_90 = tensorRelu(var_89); 
-    void* var_92 = tensorConvCutlass(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_94 = tensorRelu(var_93); 
-    void* var_95 = tensorConvCutlass(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_97 = tensorRelu(var_96); 
-    void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
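-    // Classifier head: 2x2 pooling, then dense layer (GEMM + bias add) and softmax.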
-    void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-    void* var_102 = tensorAdd(var_101, dense_1_b); 
-    void* var_103 = tensorSoftmax(var_102); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
-
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
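Both deleted benchmarks repeat the same two-line pattern (compose a file path, call
readTrainedWeights with the NCHW dimensions) dozens of times per network. A minimal
sketch of a helper that would factor this out, assuming only readTrainedWeights from
tensor_runtime.h; the name loadWeight is hypothetical and not part of the original
sources:

    #include <string>

    // Build "<dir><name>" and load the weight tensor with the given NCHW shape.
    static void* loadWeight(const std::string& dir, const char* name,
                            int n, int c, int h, int w) {
      std::string path = dir + std::string(name);
      return readTrainedWeights(path.c_str(), 0, n, c, h, w);
    }

    // Usage: void* conv2d_1_w = loadWeight(dir_prefix, "conv2d_1_w.bin", 32, 3, 3, 3);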
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_depthwise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_depthwise.cc
deleted file mode 100644
index 107024c81a7d8124a46528f7a59fac5af340bcac..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_depthwise.cc
+++ /dev/null
@@ -1,414 +0,0 @@
-
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
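-  // Each weight tensor is loaded eagerly; the four trailing readTrainedWeights
-  // arguments are the tensor's NCHW dimensions.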
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 2500;  
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorRelu(var_8); 
-    void* var_11 = tensorConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorRelu(var_15); 
-    void* var_18 = tensorConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorRelu(var_22); 
-    void* var_26 = tensorConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorRelu(var_27); 
-    void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_41 = tensorConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvCutlass(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_53 = tensorRelu(var_52); 
-    void* var_55 = tensorConvCutlass(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_57 = tensorRelu(var_56); 
-    void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_60 = tensorRelu(var_59); 
-    void* var_63 = tensorConvCutlass(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_70 = tensorConvCutlass(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_72 = tensorRelu(var_71); 
-    void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_75 = tensorRelu(var_74); 
-    void* var_77 = tensorConvCutlass(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_79 = tensorRelu(var_78); 
-    void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_85 = tensorConvCutlass(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_87 = tensorRelu(var_86); 
-    void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_90 = tensorRelu(var_89); 
-    void* var_92 = tensorConvCutlass(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_94 = tensorRelu(var_93); 
-    void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_97 = tensorRelu(var_96); 
-    void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-    void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-    void* var_102 = tensorAdd(var_101, dense_1_b); 
-    void* var_103 = tensorSoftmax(var_102); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
-
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
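
Each of the generated benchmark mains deleted in this patch drives inference through the same batched-evaluation skeleton: split the test set into fixed-size batches, run the network on each batch, and average computeAccuracy2 over the batches. In this first file the depthwise stages go through tensorConvCutlass with the group count equal to the channel count, while the shallow variants below pass the same trailing group argument to plain tensorConvolution. Below is a minimal stand-alone sketch of just the control flow, with the tensor-runtime calls collapsed into a hypothetical runBatchStub (not part of the original sources) so it compiles and runs on its own:

    #include <cstdio>

    // Hypothetical stand-in for readInputBatch -> forward pass ->
    // computeAccuracy2; returns a fake per-batch accuracy (percent).
    static float runBatchStub(int start, int end) {
      (void)start;
      (void)end;
      return 84.5f;
    }

    int main() {
      int test_input_size = 5000;
      int batch_size = 2500;
      // Integer division: a test_input_size that is not a multiple of
      // batch_size would silently drop the remainder images.
      int batch_count = test_input_size / batch_size;
      float final_accuracy = 0.0f;

      for (int i = 0; i < batch_count; i++) {
        int start = i * batch_size;      // first image index in this batch
        int end = (i + 1) * batch_size;  // one past the last image index
        final_accuracy += runBatchStub(start, end);
      }

      final_accuracy = final_accuracy / batch_count;  // mean over batches
      printf("final accuracy: %f\n", final_accuracy);
      return 0;
    }

In the deleted files the division is always exact (5000/2500 and 10000/2000), so no inputs are lost there.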
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow.cc
deleted file mode 100644
index 8905a93edb825b36c9e301ad1e450428740b4cb1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow.cc
+++ /dev/null
@@ -1,203 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 2000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorRelu(var_8); 
-    void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorRelu(var_15); 
-    void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 2, 2, 1, 64); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorRelu(var_22); 
-    void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorRelu(var_27); 
-    void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_40 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-    void* var_42 = tensorGemmGPU(var_40, dense_1_w); 
-    void* var_43 = tensorAdd(var_42, dense_1_b); 
-    void* var_44 = tensorSoftmax(var_43); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_44); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
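
The weight-loading preamble in each of these mains repeats the same two-line pattern per tensor: build "<name>_path" from dir_prefix, then call readTrainedWeights with the tensor's NCHW dimensions. A small helper could fold each pair into one call; the sketch below is hypothetical (loadWeights is not in the original sources), and the readTrainedWeights prototype shown is an assumption that merely mirrors its call sites here — the real declaration lives in the repository's utils/tensor_runtime headers:

    #include <string>

    // Assumed prototype, mirroring the call sites in this patch.
    void* readTrainedWeights(const char* file_name, int data_type,
                             int dim1, int dim2, int dim3, int dim4);

    // Hypothetical helper folding the path-building and the read into one call.
    static void* loadWeights(const std::string& dir_prefix, const char* name,
                             int n, int c, int h, int w) {
      std::string path = dir_prefix + name + ".bin";
      return readTrainedWeights(path.c_str(), /*data_type=*/0, n, c, h, w);
    }

    // Example usage, equivalent to the first two loads in the file above:
    //   void* conv2d_1_w = loadWeights(dir_prefix, "conv2d_1_w", 32, 3, 3, 3);
    //   void* bn_1_gamma = loadWeights(dir_prefix,
    //                                  "batch_normalization_1_gamma",
    //                                  1, 32, 1, 1);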
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow2.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow2.cc
deleted file mode 100644
index ee2c51b2399505d3a98b54920d9700dbd0548b86..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow2.cc
+++ /dev/null
@@ -1,231 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 2500; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_9 = tensorRelu(var_8); 
-    void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_16 = tensorRelu(var_15); 
-    void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_23 = tensorRelu(var_22); 
-    void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_28 = tensorRelu(var_27); 
-    void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-    void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-    void* var_50 = tensorAdd(var_49, dense_1_b); 
-    void* var_51 = tensorSoftmax(var_50); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
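
The dense-layer shapes in these two shallow variants follow directly from the stride bookkeeping above. Both start from 32x32 CIFAR-10 inputs and apply three stride-2 depthwise stages, so the feature map entering tensorPooling is 32 / 2 / 2 / 2 = 4x4, and the 2x2 pool with stride 2 leaves 2x2. Flattening gives 256 channels x 2 x 2 = 1024 inputs for mobilenet_shallow's dense_1_w of shape (1,1,1024,10), and 512 channels x 2 x 2 = 2048 for mobilenet_shallow2's (1,1,2048,10).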
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow_depthwise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow_depthwise.cc
deleted file mode 100644
index 4e8e6008927c1a5e85ff74884842c0ae310a7d75..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenet_shallow_depthwise.cc
+++ /dev/null
@@ -1,244 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 1;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
-
-  //std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/");
-
-  //std::string dir_prefix = std::string("../model_params/mobilenet_cifar10_shallow/"); 
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/"); 
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
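-  // Editorial sketch: every load above repeats the same readTrainedWeights(
-  // path, 0, N, C, H, W) call, where the four trailing integers are the
-  // tensor's NCHW dimensions (batch-norm parameters are 1 x C x 1 x 1
-  // vectors). A hypothetical helper (loadParam is not part of the generated
-  // source) could collapse the boilerplate:
-  //
-  //   auto loadParam = [&](const std::string& name, int n, int c, int h, int w) {
-  //     std::string path = dir_prefix + name;  // e.g. dir_prefix + "dense_1_b.bin"
-  //     return readTrainedWeights(path.c_str(), 0, n, c, h, w);
-  //   };
-  //   void* dense_1_b = loadParam("dense_1_b.bin", 1, 10, 1, 1);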
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 2500; 
-  int batch_count = test_input_size / batch_size; 
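-  // With test_input_size = 5000 and batch_size = 2500, batch_count is
-  // 5000 / 2500 = 2, so each run below performs two forward passes over the
-  // test set; the integer division silently drops any remainder, so
-  // batch_size is expected to divide test_input_size evenly.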
-
-
-  for(int j = 0; j < total_runs; j++){    
-    float final_accuracy = 0.0;    
-    for(int i = 0; i < batch_count; i++){ 
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_4 = tensorConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_6 = tensorRelu(var_5); 
-      void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-      void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_9 = tensorRelu(var_8); 
-      void* var_11 = tensorConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_13 = tensorRelu(var_12); 
-      void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-      void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_16 = tensorRelu(var_15); 
-      void* var_18 = tensorConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_26 = tensorConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_28 = tensorRelu(var_27); 
-      void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-      void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_31 = tensorRelu(var_30); 
-      void* var_33 = tensorConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_41 = tensorConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_43 = tensorRelu(var_42); 
-      void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-      void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_46 = tensorRelu(var_45); 
-      void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-      void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-      void* var_50 = tensorAdd(var_49, dense_1_b); 
-      void* var_51 = tensorSoftmax(var_50); 
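-      // The chain above is the MobileNet body: one full 3x3 convolution,
-      // then repeated depthwise-separable blocks (a 3x3 depthwise step via
-      // tensorConvCutlass, whose last argument is a group count equal to the
-      // channel count, followed by a 1x1 pointwise convolution), with each
-      // convolution wrapped in batch-norm + ReLU, ending in 2x2 pooling, a
-      // dense layer plus bias, and softmax.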
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
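-    // final_accuracy is the unweighted mean over batches, which equals the
-    // overall test accuracy here because every batch holds exactly
-    // batch_size images; dumpFinalAccuracy records one value per run, and
-    // dumpExecutionAccuracies below writes them all out.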
-  }
-
-  dumpExecutionAccuracies();
-    
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenetv2_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenetv2_cifar10.cc
deleted file mode 100644
index fcbb17f411adaf5e46162a0524efc97c90174506..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/mobilenetv2_cifar10.cc
+++ /dev/null
@@ -1,721 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
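-  // The argument is presumably the CUDA device index (0 = first GPU); the
-  // companion benchmark above pairs this with llvm_hpvm_cleanupTensorRt()
-  // at exit.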
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenetv2_quant/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,32,1,1); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,16,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,16,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,16,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,96,16,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,96,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,96,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,96,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,24,96,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,24,1,1); 
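-  // The shapes above already trace MobileNetV2's inverted-residual pattern:
-  // a 1x1 expansion (conv2d_3_w: 16 -> 96 channels, expansion factor 6), a
-  // 3x3 depthwise convolution over the 96 expanded channels
-  // (depthwise_conv2d_2_w), and a 1x1 linear projection back down
-  // (conv2d_4_w: 96 -> 24), each followed by its batch-norm parameter set.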
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,144,24,1,1); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,144,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,144,1,3,3); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,144,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,24,144,1,1); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,24,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,24,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,144,24,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,144,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,144,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,144,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,144,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,144,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,192,32,1,1); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,192,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,192,1,3,3); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,192,1,1); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,192,32,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,192,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,192,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,192,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,192,32,1,1); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,192,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,192,1,3,3); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,192,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,64,192,1,1); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,384,64,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,384,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,384,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,384,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,384,64,1,1); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,384,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,384,1,3,3); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,384,1,1); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,384,64,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,384,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,384,1,3,3); 
-  std::string batch_normalization_28_gamma_path =  dir_prefix + std::string("batch_normalization_28_gamma.bin"); 
-  void* batch_normalization_28_gamma =  readTrainedWeights(batch_normalization_28_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_28_beta_path =  dir_prefix + std::string("batch_normalization_28_beta.bin"); 
-  void* batch_normalization_28_beta =  readTrainedWeights(batch_normalization_28_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_28_mean_path =  dir_prefix + std::string("batch_normalization_28_mean.bin"); 
-  void* batch_normalization_28_mean =  readTrainedWeights(batch_normalization_28_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_28_variance_path =  dir_prefix + std::string("batch_normalization_28_variance.bin"); 
-  void* batch_normalization_28_variance =  readTrainedWeights(batch_normalization_28_variance_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,384,1,1); 
-  std::string batch_normalization_29_gamma_path =  dir_prefix + std::string("batch_normalization_29_gamma.bin"); 
-  void* batch_normalization_29_gamma =  readTrainedWeights(batch_normalization_29_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_29_beta_path =  dir_prefix + std::string("batch_normalization_29_beta.bin"); 
-  void* batch_normalization_29_beta =  readTrainedWeights(batch_normalization_29_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_29_mean_path =  dir_prefix + std::string("batch_normalization_29_mean.bin"); 
-  void* batch_normalization_29_mean =  readTrainedWeights(batch_normalization_29_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_29_variance_path =  dir_prefix + std::string("batch_normalization_29_variance.bin"); 
-  void* batch_normalization_29_variance =  readTrainedWeights(batch_normalization_29_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,384,64,1,1); 
-  std::string batch_normalization_30_gamma_path =  dir_prefix + std::string("batch_normalization_30_gamma.bin"); 
-  void* batch_normalization_30_gamma =  readTrainedWeights(batch_normalization_30_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_30_beta_path =  dir_prefix + std::string("batch_normalization_30_beta.bin"); 
-  void* batch_normalization_30_beta =  readTrainedWeights(batch_normalization_30_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_30_mean_path =  dir_prefix + std::string("batch_normalization_30_mean.bin"); 
-  void* batch_normalization_30_mean =  readTrainedWeights(batch_normalization_30_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_30_variance_path =  dir_prefix + std::string("batch_normalization_30_variance.bin"); 
-  void* batch_normalization_30_variance =  readTrainedWeights(batch_normalization_30_variance_path.c_str(), 0,1,384,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,384,1,3,3); 
-  std::string batch_normalization_31_gamma_path =  dir_prefix + std::string("batch_normalization_31_gamma.bin"); 
-  void* batch_normalization_31_gamma =  readTrainedWeights(batch_normalization_31_gamma_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_31_beta_path =  dir_prefix + std::string("batch_normalization_31_beta.bin"); 
-  void* batch_normalization_31_beta =  readTrainedWeights(batch_normalization_31_beta_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_31_mean_path =  dir_prefix + std::string("batch_normalization_31_mean.bin"); 
-  void* batch_normalization_31_mean =  readTrainedWeights(batch_normalization_31_mean_path.c_str(), 0,1,384,1,1); 
-  std::string batch_normalization_31_variance_path =  dir_prefix + std::string("batch_normalization_31_variance.bin"); 
-  void* batch_normalization_31_variance =  readTrainedWeights(batch_normalization_31_variance_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_22_w_path =  dir_prefix + std::string("conv2d_22_w.bin"); 
-  void* conv2d_22_w =  readTrainedWeights(conv2d_22_w_path.c_str(), 0,96,384,1,1); 
-  std::string batch_normalization_32_gamma_path =  dir_prefix + std::string("batch_normalization_32_gamma.bin"); 
-  void* batch_normalization_32_gamma =  readTrainedWeights(batch_normalization_32_gamma_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_32_beta_path =  dir_prefix + std::string("batch_normalization_32_beta.bin"); 
-  void* batch_normalization_32_beta =  readTrainedWeights(batch_normalization_32_beta_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_32_mean_path =  dir_prefix + std::string("batch_normalization_32_mean.bin"); 
-  void* batch_normalization_32_mean =  readTrainedWeights(batch_normalization_32_mean_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_32_variance_path =  dir_prefix + std::string("batch_normalization_32_variance.bin"); 
-  void* batch_normalization_32_variance =  readTrainedWeights(batch_normalization_32_variance_path.c_str(), 0,1,96,1,1); 
-  std::string conv2d_23_w_path =  dir_prefix + std::string("conv2d_23_w.bin"); 
-  void* conv2d_23_w =  readTrainedWeights(conv2d_23_w_path.c_str(), 0,576,96,1,1); 
-  std::string batch_normalization_33_gamma_path =  dir_prefix + std::string("batch_normalization_33_gamma.bin"); 
-  void* batch_normalization_33_gamma =  readTrainedWeights(batch_normalization_33_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_33_beta_path =  dir_prefix + std::string("batch_normalization_33_beta.bin"); 
-  void* batch_normalization_33_beta =  readTrainedWeights(batch_normalization_33_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_33_mean_path =  dir_prefix + std::string("batch_normalization_33_mean.bin"); 
-  void* batch_normalization_33_mean =  readTrainedWeights(batch_normalization_33_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_33_variance_path =  dir_prefix + std::string("batch_normalization_33_variance.bin"); 
-  void* batch_normalization_33_variance =  readTrainedWeights(batch_normalization_33_variance_path.c_str(), 0,1,576,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,576,1,3,3); 
-  std::string batch_normalization_34_gamma_path =  dir_prefix + std::string("batch_normalization_34_gamma.bin"); 
-  void* batch_normalization_34_gamma =  readTrainedWeights(batch_normalization_34_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_34_beta_path =  dir_prefix + std::string("batch_normalization_34_beta.bin"); 
-  void* batch_normalization_34_beta =  readTrainedWeights(batch_normalization_34_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_34_mean_path =  dir_prefix + std::string("batch_normalization_34_mean.bin"); 
-  void* batch_normalization_34_mean =  readTrainedWeights(batch_normalization_34_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_34_variance_path =  dir_prefix + std::string("batch_normalization_34_variance.bin"); 
-  void* batch_normalization_34_variance =  readTrainedWeights(batch_normalization_34_variance_path.c_str(), 0,1,576,1,1); 
-  std::string conv2d_24_w_path =  dir_prefix + std::string("conv2d_24_w.bin"); 
-  void* conv2d_24_w =  readTrainedWeights(conv2d_24_w_path.c_str(), 0,96,576,1,1); 
-  std::string batch_normalization_35_gamma_path =  dir_prefix + std::string("batch_normalization_35_gamma.bin"); 
-  void* batch_normalization_35_gamma =  readTrainedWeights(batch_normalization_35_gamma_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_35_beta_path =  dir_prefix + std::string("batch_normalization_35_beta.bin"); 
-  void* batch_normalization_35_beta =  readTrainedWeights(batch_normalization_35_beta_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_35_mean_path =  dir_prefix + std::string("batch_normalization_35_mean.bin"); 
-  void* batch_normalization_35_mean =  readTrainedWeights(batch_normalization_35_mean_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_35_variance_path =  dir_prefix + std::string("batch_normalization_35_variance.bin"); 
-  void* batch_normalization_35_variance =  readTrainedWeights(batch_normalization_35_variance_path.c_str(), 0,1,96,1,1); 
-  std::string conv2d_25_w_path =  dir_prefix + std::string("conv2d_25_w.bin"); 
-  void* conv2d_25_w =  readTrainedWeights(conv2d_25_w_path.c_str(), 0,576,96,1,1); 
-  std::string batch_normalization_36_gamma_path =  dir_prefix + std::string("batch_normalization_36_gamma.bin"); 
-  void* batch_normalization_36_gamma =  readTrainedWeights(batch_normalization_36_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_36_beta_path =  dir_prefix + std::string("batch_normalization_36_beta.bin"); 
-  void* batch_normalization_36_beta =  readTrainedWeights(batch_normalization_36_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_36_mean_path =  dir_prefix + std::string("batch_normalization_36_mean.bin"); 
-  void* batch_normalization_36_mean =  readTrainedWeights(batch_normalization_36_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_36_variance_path =  dir_prefix + std::string("batch_normalization_36_variance.bin"); 
-  void* batch_normalization_36_variance =  readTrainedWeights(batch_normalization_36_variance_path.c_str(), 0,1,576,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,576,1,3,3); 
-  std::string batch_normalization_37_gamma_path =  dir_prefix + std::string("batch_normalization_37_gamma.bin"); 
-  void* batch_normalization_37_gamma =  readTrainedWeights(batch_normalization_37_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_37_beta_path =  dir_prefix + std::string("batch_normalization_37_beta.bin"); 
-  void* batch_normalization_37_beta =  readTrainedWeights(batch_normalization_37_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_37_mean_path =  dir_prefix + std::string("batch_normalization_37_mean.bin"); 
-  void* batch_normalization_37_mean =  readTrainedWeights(batch_normalization_37_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_37_variance_path =  dir_prefix + std::string("batch_normalization_37_variance.bin"); 
-  void* batch_normalization_37_variance =  readTrainedWeights(batch_normalization_37_variance_path.c_str(), 0,1,576,1,1); 
-  std::string conv2d_26_w_path =  dir_prefix + std::string("conv2d_26_w.bin"); 
-  void* conv2d_26_w =  readTrainedWeights(conv2d_26_w_path.c_str(), 0,96,576,1,1); 
-  std::string batch_normalization_38_gamma_path =  dir_prefix + std::string("batch_normalization_38_gamma.bin"); 
-  void* batch_normalization_38_gamma =  readTrainedWeights(batch_normalization_38_gamma_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_38_beta_path =  dir_prefix + std::string("batch_normalization_38_beta.bin"); 
-  void* batch_normalization_38_beta =  readTrainedWeights(batch_normalization_38_beta_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_38_mean_path =  dir_prefix + std::string("batch_normalization_38_mean.bin"); 
-  void* batch_normalization_38_mean =  readTrainedWeights(batch_normalization_38_mean_path.c_str(), 0,1,96,1,1); 
-  std::string batch_normalization_38_variance_path =  dir_prefix + std::string("batch_normalization_38_variance.bin"); 
-  void* batch_normalization_38_variance =  readTrainedWeights(batch_normalization_38_variance_path.c_str(), 0,1,96,1,1); 
-  std::string conv2d_27_w_path =  dir_prefix + std::string("conv2d_27_w.bin"); 
-  void* conv2d_27_w =  readTrainedWeights(conv2d_27_w_path.c_str(), 0,576,96,1,1); 
-  std::string batch_normalization_39_gamma_path =  dir_prefix + std::string("batch_normalization_39_gamma.bin"); 
-  void* batch_normalization_39_gamma =  readTrainedWeights(batch_normalization_39_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_39_beta_path =  dir_prefix + std::string("batch_normalization_39_beta.bin"); 
-  void* batch_normalization_39_beta =  readTrainedWeights(batch_normalization_39_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_39_mean_path =  dir_prefix + std::string("batch_normalization_39_mean.bin"); 
-  void* batch_normalization_39_mean =  readTrainedWeights(batch_normalization_39_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_39_variance_path =  dir_prefix + std::string("batch_normalization_39_variance.bin"); 
-  void* batch_normalization_39_variance =  readTrainedWeights(batch_normalization_39_variance_path.c_str(), 0,1,576,1,1); 
-  std::string depthwise_conv2d_14_w_path =  dir_prefix + std::string("depthwise_conv2d_14_w.bin"); 
-  void* depthwise_conv2d_14_w =  readTrainedWeights(depthwise_conv2d_14_w_path.c_str(), 0,576,1,3,3); 
-  std::string batch_normalization_40_gamma_path =  dir_prefix + std::string("batch_normalization_40_gamma.bin"); 
-  void* batch_normalization_40_gamma =  readTrainedWeights(batch_normalization_40_gamma_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_40_beta_path =  dir_prefix + std::string("batch_normalization_40_beta.bin"); 
-  void* batch_normalization_40_beta =  readTrainedWeights(batch_normalization_40_beta_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_40_mean_path =  dir_prefix + std::string("batch_normalization_40_mean.bin"); 
-  void* batch_normalization_40_mean =  readTrainedWeights(batch_normalization_40_mean_path.c_str(), 0,1,576,1,1); 
-  std::string batch_normalization_40_variance_path =  dir_prefix + std::string("batch_normalization_40_variance.bin"); 
-  void* batch_normalization_40_variance =  readTrainedWeights(batch_normalization_40_variance_path.c_str(), 0,1,576,1,1); 
-  std::string conv2d_28_w_path =  dir_prefix + std::string("conv2d_28_w.bin"); 
-  void* conv2d_28_w =  readTrainedWeights(conv2d_28_w_path.c_str(), 0,160,576,1,1); 
-  std::string batch_normalization_41_gamma_path =  dir_prefix + std::string("batch_normalization_41_gamma.bin"); 
-  void* batch_normalization_41_gamma =  readTrainedWeights(batch_normalization_41_gamma_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_41_beta_path =  dir_prefix + std::string("batch_normalization_41_beta.bin"); 
-  void* batch_normalization_41_beta =  readTrainedWeights(batch_normalization_41_beta_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_41_mean_path =  dir_prefix + std::string("batch_normalization_41_mean.bin"); 
-  void* batch_normalization_41_mean =  readTrainedWeights(batch_normalization_41_mean_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_41_variance_path =  dir_prefix + std::string("batch_normalization_41_variance.bin"); 
-  void* batch_normalization_41_variance =  readTrainedWeights(batch_normalization_41_variance_path.c_str(), 0,1,160,1,1); 
-  std::string conv2d_29_w_path =  dir_prefix + std::string("conv2d_29_w.bin"); 
-  void* conv2d_29_w =  readTrainedWeights(conv2d_29_w_path.c_str(), 0,960,160,1,1); 
-  std::string batch_normalization_42_gamma_path =  dir_prefix + std::string("batch_normalization_42_gamma.bin"); 
-  void* batch_normalization_42_gamma =  readTrainedWeights(batch_normalization_42_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_42_beta_path =  dir_prefix + std::string("batch_normalization_42_beta.bin"); 
-  void* batch_normalization_42_beta =  readTrainedWeights(batch_normalization_42_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_42_mean_path =  dir_prefix + std::string("batch_normalization_42_mean.bin"); 
-  void* batch_normalization_42_mean =  readTrainedWeights(batch_normalization_42_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_42_variance_path =  dir_prefix + std::string("batch_normalization_42_variance.bin"); 
-  void* batch_normalization_42_variance =  readTrainedWeights(batch_normalization_42_variance_path.c_str(), 0,1,960,1,1); 
-  std::string depthwise_conv2d_15_w_path =  dir_prefix + std::string("depthwise_conv2d_15_w.bin"); 
-  void* depthwise_conv2d_15_w =  readTrainedWeights(depthwise_conv2d_15_w_path.c_str(), 0,960,1,3,3); 
-  std::string batch_normalization_43_gamma_path =  dir_prefix + std::string("batch_normalization_43_gamma.bin"); 
-  void* batch_normalization_43_gamma =  readTrainedWeights(batch_normalization_43_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_43_beta_path =  dir_prefix + std::string("batch_normalization_43_beta.bin"); 
-  void* batch_normalization_43_beta =  readTrainedWeights(batch_normalization_43_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_43_mean_path =  dir_prefix + std::string("batch_normalization_43_mean.bin"); 
-  void* batch_normalization_43_mean =  readTrainedWeights(batch_normalization_43_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_43_variance_path =  dir_prefix + std::string("batch_normalization_43_variance.bin"); 
-  void* batch_normalization_43_variance =  readTrainedWeights(batch_normalization_43_variance_path.c_str(), 0,1,960,1,1); 
-  std::string conv2d_30_w_path =  dir_prefix + std::string("conv2d_30_w.bin"); 
-  void* conv2d_30_w =  readTrainedWeights(conv2d_30_w_path.c_str(), 0,160,960,1,1); 
-  std::string batch_normalization_44_gamma_path =  dir_prefix + std::string("batch_normalization_44_gamma.bin"); 
-  void* batch_normalization_44_gamma =  readTrainedWeights(batch_normalization_44_gamma_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_44_beta_path =  dir_prefix + std::string("batch_normalization_44_beta.bin"); 
-  void* batch_normalization_44_beta =  readTrainedWeights(batch_normalization_44_beta_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_44_mean_path =  dir_prefix + std::string("batch_normalization_44_mean.bin"); 
-  void* batch_normalization_44_mean =  readTrainedWeights(batch_normalization_44_mean_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_44_variance_path =  dir_prefix + std::string("batch_normalization_44_variance.bin"); 
-  void* batch_normalization_44_variance =  readTrainedWeights(batch_normalization_44_variance_path.c_str(), 0,1,160,1,1); 
-  std::string conv2d_31_w_path =  dir_prefix + std::string("conv2d_31_w.bin"); 
-  void* conv2d_31_w =  readTrainedWeights(conv2d_31_w_path.c_str(), 0,960,160,1,1); 
-  std::string batch_normalization_45_gamma_path =  dir_prefix + std::string("batch_normalization_45_gamma.bin"); 
-  void* batch_normalization_45_gamma =  readTrainedWeights(batch_normalization_45_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_45_beta_path =  dir_prefix + std::string("batch_normalization_45_beta.bin"); 
-  void* batch_normalization_45_beta =  readTrainedWeights(batch_normalization_45_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_45_mean_path =  dir_prefix + std::string("batch_normalization_45_mean.bin"); 
-  void* batch_normalization_45_mean =  readTrainedWeights(batch_normalization_45_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_45_variance_path =  dir_prefix + std::string("batch_normalization_45_variance.bin"); 
-  void* batch_normalization_45_variance =  readTrainedWeights(batch_normalization_45_variance_path.c_str(), 0,1,960,1,1); 
-  std::string depthwise_conv2d_16_w_path =  dir_prefix + std::string("depthwise_conv2d_16_w.bin"); 
-  void* depthwise_conv2d_16_w =  readTrainedWeights(depthwise_conv2d_16_w_path.c_str(), 0,960,1,3,3); 
-  std::string batch_normalization_46_gamma_path =  dir_prefix + std::string("batch_normalization_46_gamma.bin"); 
-  void* batch_normalization_46_gamma =  readTrainedWeights(batch_normalization_46_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_46_beta_path =  dir_prefix + std::string("batch_normalization_46_beta.bin"); 
-  void* batch_normalization_46_beta =  readTrainedWeights(batch_normalization_46_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_46_mean_path =  dir_prefix + std::string("batch_normalization_46_mean.bin"); 
-  void* batch_normalization_46_mean =  readTrainedWeights(batch_normalization_46_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_46_variance_path =  dir_prefix + std::string("batch_normalization_46_variance.bin"); 
-  void* batch_normalization_46_variance =  readTrainedWeights(batch_normalization_46_variance_path.c_str(), 0,1,960,1,1); 
-  std::string conv2d_32_w_path =  dir_prefix + std::string("conv2d_32_w.bin"); 
-  void* conv2d_32_w =  readTrainedWeights(conv2d_32_w_path.c_str(), 0,160,960,1,1); 
-  std::string batch_normalization_47_gamma_path =  dir_prefix + std::string("batch_normalization_47_gamma.bin"); 
-  void* batch_normalization_47_gamma =  readTrainedWeights(batch_normalization_47_gamma_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_47_beta_path =  dir_prefix + std::string("batch_normalization_47_beta.bin"); 
-  void* batch_normalization_47_beta =  readTrainedWeights(batch_normalization_47_beta_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_47_mean_path =  dir_prefix + std::string("batch_normalization_47_mean.bin"); 
-  void* batch_normalization_47_mean =  readTrainedWeights(batch_normalization_47_mean_path.c_str(), 0,1,160,1,1); 
-  std::string batch_normalization_47_variance_path =  dir_prefix + std::string("batch_normalization_47_variance.bin"); 
-  void* batch_normalization_47_variance =  readTrainedWeights(batch_normalization_47_variance_path.c_str(), 0,1,160,1,1); 
-  std::string conv2d_33_w_path =  dir_prefix + std::string("conv2d_33_w.bin"); 
-  void* conv2d_33_w =  readTrainedWeights(conv2d_33_w_path.c_str(), 0,960,160,1,1); 
-  std::string batch_normalization_48_gamma_path =  dir_prefix + std::string("batch_normalization_48_gamma.bin"); 
-  void* batch_normalization_48_gamma =  readTrainedWeights(batch_normalization_48_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_48_beta_path =  dir_prefix + std::string("batch_normalization_48_beta.bin"); 
-  void* batch_normalization_48_beta =  readTrainedWeights(batch_normalization_48_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_48_mean_path =  dir_prefix + std::string("batch_normalization_48_mean.bin"); 
-  void* batch_normalization_48_mean =  readTrainedWeights(batch_normalization_48_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_48_variance_path =  dir_prefix + std::string("batch_normalization_48_variance.bin"); 
-  void* batch_normalization_48_variance =  readTrainedWeights(batch_normalization_48_variance_path.c_str(), 0,1,960,1,1); 
-  std::string depthwise_conv2d_17_w_path =  dir_prefix + std::string("depthwise_conv2d_17_w.bin"); 
-  void* depthwise_conv2d_17_w =  readTrainedWeights(depthwise_conv2d_17_w_path.c_str(), 0,960,1,3,3); 
-  std::string batch_normalization_49_gamma_path =  dir_prefix + std::string("batch_normalization_49_gamma.bin"); 
-  void* batch_normalization_49_gamma =  readTrainedWeights(batch_normalization_49_gamma_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_49_beta_path =  dir_prefix + std::string("batch_normalization_49_beta.bin"); 
-  void* batch_normalization_49_beta =  readTrainedWeights(batch_normalization_49_beta_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_49_mean_path =  dir_prefix + std::string("batch_normalization_49_mean.bin"); 
-  void* batch_normalization_49_mean =  readTrainedWeights(batch_normalization_49_mean_path.c_str(), 0,1,960,1,1); 
-  std::string batch_normalization_49_variance_path =  dir_prefix + std::string("batch_normalization_49_variance.bin"); 
-  void* batch_normalization_49_variance =  readTrainedWeights(batch_normalization_49_variance_path.c_str(), 0,1,960,1,1); 
-  std::string conv2d_34_w_path =  dir_prefix + std::string("conv2d_34_w.bin"); 
-  void* conv2d_34_w =  readTrainedWeights(conv2d_34_w_path.c_str(), 0,320,960,1,1); 
-  std::string batch_normalization_50_gamma_path =  dir_prefix + std::string("batch_normalization_50_gamma.bin"); 
-  void* batch_normalization_50_gamma =  readTrainedWeights(batch_normalization_50_gamma_path.c_str(), 0,1,320,1,1); 
-  std::string batch_normalization_50_beta_path =  dir_prefix + std::string("batch_normalization_50_beta.bin"); 
-  void* batch_normalization_50_beta =  readTrainedWeights(batch_normalization_50_beta_path.c_str(), 0,1,320,1,1); 
-  std::string batch_normalization_50_mean_path =  dir_prefix + std::string("batch_normalization_50_mean.bin"); 
-  void* batch_normalization_50_mean =  readTrainedWeights(batch_normalization_50_mean_path.c_str(), 0,1,320,1,1); 
-  std::string batch_normalization_50_variance_path =  dir_prefix + std::string("batch_normalization_50_variance.bin"); 
-  void* batch_normalization_50_variance =  readTrainedWeights(batch_normalization_50_variance_path.c_str(), 0,1,320,1,1); 
-  std::string conv2d_35_w_path =  dir_prefix + std::string("conv2d_35_w.bin"); 
-  void* conv2d_35_w =  readTrainedWeights(conv2d_35_w_path.c_str(), 0,1280,320,1,1); 
-  std::string batch_normalization_51_gamma_path =  dir_prefix + std::string("batch_normalization_51_gamma.bin"); 
-  void* batch_normalization_51_gamma =  readTrainedWeights(batch_normalization_51_gamma_path.c_str(), 0,1,1280,1,1); 
-  std::string batch_normalization_51_beta_path =  dir_prefix + std::string("batch_normalization_51_beta.bin"); 
-  void* batch_normalization_51_beta =  readTrainedWeights(batch_normalization_51_beta_path.c_str(), 0,1,1280,1,1); 
-  std::string batch_normalization_51_mean_path =  dir_prefix + std::string("batch_normalization_51_mean.bin"); 
-  void* batch_normalization_51_mean =  readTrainedWeights(batch_normalization_51_mean_path.c_str(), 0,1,1280,1,1); 
-  std::string batch_normalization_51_variance_path =  dir_prefix + std::string("batch_normalization_51_variance.bin"); 
-  void* batch_normalization_51_variance =  readTrainedWeights(batch_normalization_51_variance_path.c_str(), 0,1,1280,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,5120,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
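-  // All weights above are read as NCHW-shaped fp32 blobs; batch-norm parameters are 1xCx1x1 per-channel vectors.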
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 500; 
-  int batch_count = test_input_size / batch_size; 
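-  // 10000 test inputs / 500 per batch = 20 batches; integer division drops any remainder, so sizes must divide evenly.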
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_5 = tensorBatchNorm(var_4, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_8 = tensorBatchNorm(var_7, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_9 = tensorConvolution(var_8, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_10 = tensorBatchNorm(var_9, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_11 = tensorRelu(var_10); 
-    void* var_13 = tensorConvolution(var_11, depthwise_conv2d_2_w, 1, 1, 1, 1, 1, 96); 
-    void* var_14 = tensorBatchNorm(var_13, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_15 = tensorRelu(var_14); 
-    void* var_16 = tensorConvolution(var_15, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_17 = tensorBatchNorm(var_16, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_18 = tensorConvolution(var_17, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_22 = tensorConvolution(var_20, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 144); 
-    void* var_23 = tensorBatchNorm(var_22, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_24 = tensorRelu(var_23); 
-    void* var_25 = tensorConvolution(var_24, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_26 = tensorBatchNorm(var_25, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_27 = tensorAdd(var_17, var_26); 
-    void* var_28 = tensorConvolution(var_27, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_29 = tensorBatchNorm(var_28, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_30 = tensorRelu(var_29); 
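-    // Stride-2 depthwise conv: first of three downsampling stages (also depthwise_conv2d_7 and _14) that halve spatial resolution.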
-    void* var_32 = tensorConvolution(var_30, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 144); 
-    void* var_33 = tensorBatchNorm(var_32, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_35 = tensorConvolution(var_34, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_36 = tensorBatchNorm(var_35, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_37 = tensorConvolution(var_36, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_38 = tensorBatchNorm(var_37, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_39 = tensorRelu(var_38); 
-    void* var_41 = tensorConvolution(var_39, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 192); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_46 = tensorAdd(var_36, var_45); 
-    void* var_47 = tensorConvolution(var_46, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_48 = tensorBatchNorm(var_47, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_49 = tensorRelu(var_48); 
-    void* var_51 = tensorConvolution(var_49, depthwise_conv2d_6_w, 1, 1, 1, 1, 1, 192); 
-    void* var_52 = tensorBatchNorm(var_51, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_53 = tensorRelu(var_52); 
-    void* var_54 = tensorConvolution(var_53, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_55 = tensorBatchNorm(var_54, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_56 = tensorAdd(var_46, var_55); 
-    void* var_57 = tensorConvolution(var_56, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_58 = tensorBatchNorm(var_57, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_59 = tensorRelu(var_58); 
-    void* var_61 = tensorConvolution(var_59, depthwise_conv2d_7_w, 1, 1, 2, 2, 1, 192); 
-    void* var_62 = tensorBatchNorm(var_61, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_63 = tensorRelu(var_62); 
-    void* var_64 = tensorConvolution(var_63, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_65 = tensorBatchNorm(var_64, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_66 = tensorConvolution(var_65, conv2d_15_w, 0, 0, 1, 1, 1, 1); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_70 = tensorConvolution(var_68, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 384); 
-    void* var_71 = tensorBatchNorm(var_70, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_72 = tensorRelu(var_71); 
-    void* var_73 = tensorConvolution(var_72, conv2d_16_w, 0, 0, 1, 1, 1, 1); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_75 = tensorAdd(var_65, var_74); 
-    void* var_76 = tensorConvolution(var_75, conv2d_17_w, 0, 0, 1, 1, 1, 1); 
-    void* var_77 = tensorBatchNorm(var_76, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_78 = tensorRelu(var_77); 
-    void* var_80 = tensorConvolution(var_78, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 384); 
-    void* var_81 = tensorBatchNorm(var_80, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_83 = tensorConvolution(var_82, conv2d_18_w, 0, 0, 1, 1, 1, 1); 
-    void* var_84 = tensorBatchNorm(var_83, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_85 = tensorAdd(var_75, var_84); 
-    void* var_86 = tensorConvolution(var_85, conv2d_19_w, 0, 0, 1, 1, 1, 1); 
-    void* var_87 = tensorBatchNorm(var_86, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_88 = tensorRelu(var_87); 
-    void* var_90 = tensorConvolution(var_88, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 384); 
-    void* var_91 = tensorBatchNorm(var_90, batch_normalization_28_gamma, batch_normalization_28_beta, batch_normalization_28_mean, batch_normalization_28_variance, 0.001); 
-    void* var_92 = tensorRelu(var_91); 
-    void* var_93 = tensorConvolution(var_92, conv2d_20_w, 0, 0, 1, 1, 1, 1); 
-    void* var_94 = tensorBatchNorm(var_93, batch_normalization_29_gamma, batch_normalization_29_beta, batch_normalization_29_mean, batch_normalization_29_variance, 0.001); 
-    void* var_95 = tensorAdd(var_85, var_94); 
-    void* var_97 = tensorConvolution(var_95, conv2d_21_w, 0, 0, 1, 1, 1, 1); 
-    void* var_98 = tensorBatchNorm(var_97, batch_normalization_30_gamma, batch_normalization_30_beta, batch_normalization_30_mean, batch_normalization_30_variance, 0.001); 
-    void* var_99 = tensorRelu(var_98); 
-    void* var_101 = tensorConvolution(var_99, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 384); 
-    void* var_102 = tensorBatchNorm(var_101, batch_normalization_31_gamma, batch_normalization_31_beta, batch_normalization_31_mean, batch_normalization_31_variance, 0.001); 
-    void* var_103 = tensorRelu(var_102); 
-    void* var_104 = tensorConvolution(var_103, conv2d_22_w, 0, 0, 1, 1, 1, 1); 
-    void* var_105 = tensorBatchNorm(var_104, batch_normalization_32_gamma, batch_normalization_32_beta, batch_normalization_32_mean, batch_normalization_32_variance, 0.001); 
-    void* var_106 = tensorConvolution(var_105, conv2d_23_w, 0, 0, 1, 1, 1, 1); 
-    void* var_107 = tensorBatchNorm(var_106, batch_normalization_33_gamma, batch_normalization_33_beta, batch_normalization_33_mean, batch_normalization_33_variance, 0.001); 
-    void* var_108 = tensorRelu(var_107); 
-    void* var_110 = tensorConvolution(var_108, depthwise_conv2d_12_w, 1, 1, 1, 1, 1, 576); 
-    void* var_111 = tensorBatchNorm(var_110, batch_normalization_34_gamma, batch_normalization_34_beta, batch_normalization_34_mean, batch_normalization_34_variance, 0.001); 
-    void* var_112 = tensorRelu(var_111); 
-    void* var_113 = tensorConvolution(var_112, conv2d_24_w, 0, 0, 1, 1, 1, 1); 
-    void* var_114 = tensorBatchNorm(var_113, batch_normalization_35_gamma, batch_normalization_35_beta, batch_normalization_35_mean, batch_normalization_35_variance, 0.001); 
-    void* var_115 = tensorAdd(var_105, var_114); 
-    void* var_116 = tensorConvolution(var_115, conv2d_25_w, 0, 0, 1, 1, 1, 1); 
-    void* var_117 = tensorBatchNorm(var_116, batch_normalization_36_gamma, batch_normalization_36_beta, batch_normalization_36_mean, batch_normalization_36_variance, 0.001); 
-    void* var_118 = tensorRelu(var_117); 
-    void* var_120 = tensorConvolution(var_118, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 576); 
-    void* var_121 = tensorBatchNorm(var_120, batch_normalization_37_gamma, batch_normalization_37_beta, batch_normalization_37_mean, batch_normalization_37_variance, 0.001); 
-    void* var_122 = tensorRelu(var_121); 
-    void* var_123 = tensorConvolution(var_122, conv2d_26_w, 0, 0, 1, 1, 1, 1); 
-    void* var_124 = tensorBatchNorm(var_123, batch_normalization_38_gamma, batch_normalization_38_beta, batch_normalization_38_mean, batch_normalization_38_variance, 0.001); 
-    void* var_125 = tensorAdd(var_115, var_124); 
-    void* var_127 = tensorConvolution(var_125, conv2d_27_w, 0, 0, 1, 1, 1, 1); 
-    void* var_128 = tensorBatchNorm(var_127, batch_normalization_39_gamma, batch_normalization_39_beta, batch_normalization_39_mean, batch_normalization_39_variance, 0.001); 
-    void* var_129 = tensorRelu(var_128); 
-    void* var_131 = tensorConvolution(var_129, depthwise_conv2d_14_w, 1, 1, 2, 2, 1, 576); 
-    void* var_132 = tensorBatchNorm(var_131, batch_normalization_40_gamma, batch_normalization_40_beta, batch_normalization_40_mean, batch_normalization_40_variance, 0.001); 
-    void* var_133 = tensorRelu(var_132); 
-    void* var_134 = tensorConvolution(var_133, conv2d_28_w, 0, 0, 1, 1, 1, 1); 
-    void* var_135 = tensorBatchNorm(var_134, batch_normalization_41_gamma, batch_normalization_41_beta, batch_normalization_41_mean, batch_normalization_41_variance, 0.001); 
-    void* var_136 = tensorConvolution(var_135, conv2d_29_w, 0, 0, 1, 1, 1, 1); 
-    void* var_137 = tensorBatchNorm(var_136, batch_normalization_42_gamma, batch_normalization_42_beta, batch_normalization_42_mean, batch_normalization_42_variance, 0.001); 
-    void* var_138 = tensorRelu(var_137); 
-    void* var_140 = tensorConvolution(var_138, depthwise_conv2d_15_w, 1, 1, 1, 1, 1, 960); 
-    void* var_141 = tensorBatchNorm(var_140, batch_normalization_43_gamma, batch_normalization_43_beta, batch_normalization_43_mean, batch_normalization_43_variance, 0.001); 
-    void* var_142 = tensorRelu(var_141); 
-    void* var_143 = tensorConvolution(var_142, conv2d_30_w, 0, 0, 1, 1, 1, 1); 
-    void* var_144 = tensorBatchNorm(var_143, batch_normalization_44_gamma, batch_normalization_44_beta, batch_normalization_44_mean, batch_normalization_44_variance, 0.001); 
-    void* var_145 = tensorAdd(var_135, var_144); 
-    void* var_146 = tensorConvolution(var_145, conv2d_31_w, 0, 0, 1, 1, 1, 1); 
-    void* var_147 = tensorBatchNorm(var_146, batch_normalization_45_gamma, batch_normalization_45_beta, batch_normalization_45_mean, batch_normalization_45_variance, 0.001); 
-    void* var_148 = tensorRelu(var_147); 
-    void* var_150 = tensorConvolution(var_148, depthwise_conv2d_16_w, 1, 1, 1, 1, 1, 960); 
-    void* var_151 = tensorBatchNorm(var_150, batch_normalization_46_gamma, batch_normalization_46_beta, batch_normalization_46_mean, batch_normalization_46_variance, 0.001); 
-    void* var_152 = tensorRelu(var_151); 
-    void* var_153 = tensorConvolution(var_152, conv2d_32_w, 0, 0, 1, 1, 1, 1); 
-    void* var_154 = tensorBatchNorm(var_153, batch_normalization_47_gamma, batch_normalization_47_beta, batch_normalization_47_mean, batch_normalization_47_variance, 0.001); 
-    void* var_155 = tensorAdd(var_145, var_154); 
-    void* var_157 = tensorConvolution(var_155, conv2d_33_w, 0, 0, 1, 1, 1, 1); 
-    void* var_158 = tensorBatchNorm(var_157, batch_normalization_48_gamma, batch_normalization_48_beta, batch_normalization_48_mean, batch_normalization_48_variance, 0.001); 
-    void* var_159 = tensorRelu(var_158); 
-    void* var_161 = tensorConvolution(var_159, depthwise_conv2d_17_w, 1, 1, 1, 1, 1, 960); 
-    void* var_162 = tensorBatchNorm(var_161, batch_normalization_49_gamma, batch_normalization_49_beta, batch_normalization_49_mean, batch_normalization_49_variance, 0.001); 
-    void* var_163 = tensorRelu(var_162); 
-    void* var_164 = tensorConvolution(var_163, conv2d_34_w, 0, 0, 1, 1, 1, 1); 
-    void* var_165 = tensorBatchNorm(var_164, batch_normalization_50_gamma, batch_normalization_50_beta, batch_normalization_50_mean, batch_normalization_50_variance, 0.001); 
-    void* var_167 = tensorConvolution(var_165, conv2d_35_w, 0, 0, 1, 1, 1, 1); 
-    void* var_168 = tensorBatchNorm(var_167, batch_normalization_51_gamma, batch_normalization_51_beta, batch_normalization_51_mean, batch_normalization_51_variance, 0.001); 
-    void* var_169 = tensorRelu(var_168); 
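-    // 2x2/stride-2 pooling on the 4x4x1280 map leaves 2*2*1280 = 5120 features, matching dense_1_w's 5120x10 shape.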
-    void* var_170 = tensorPooling(var_169,1,2,2,0,0,2,2); 
-    void* var_172 = tensorGemmGPU(var_170, dense_1_w); 
-    void* var_173 = tensorAdd(var_172, dense_1_b); 
-    void* var_174 = tensorSoftmax(var_173); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_174); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
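-  // With equal-sized batches, the mean of per-batch accuracies equals the overall test accuracy.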
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline.cc
deleted file mode 100644
index b694f007b2e1c4cbe71bbe53c1065888542b23f1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline.cc
+++ /dev/null
@@ -1,161 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Outline - Motion Blur - Emboss ********** \n");
-  // FIXIT: Extend this to the full dataset - currently 2000 of the 9145 images
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 2000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/caltech101_255_float32.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/caltech-G-O-M-E-FP32-clipped-2000.bin",
-  //void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/caltech-gaussian.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-
-  // NOTE: Filter descriptors do NOT have a batch-size dimension
-  // NOTE: The first two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel count matches the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
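-      // Handshake with the OpenTuner driver over a named pipe: block until the
-      // tuner writes a token, and abort the run when it sends "stop_run".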
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
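-    // Padding of (K-1)/2 keeps output size equal to input size: 4 for the 9x9
-    // Gaussian and motion-blur filters, 1 for 3x3 filters, 2 for the 5x5 emboss filter.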
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
-
-    void* outline_out = tensorConvolution(gaussian_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * outline_out_clip = tensorRelu2(outline_out, 0, 255);
-
-    void* motionblur_out = tensorConvolution(outline_out_clip, motionblur_filter, 4, 4, 1, 1,
-                                       conv_mode, conv_precision);
-    void * motionblur_out_clip = tensorRelu2(motionblur_out, 0, 255);
-
-    void* emboss_out = tensorConvolution(motionblur_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorAdd(emboss_out, emboss_bias);
-    void* result = tensorRelu2(emboss_bias_out, 0, 255);
-    //void* result = gaussian_out;
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    dumpOutput(result);
-    
-    //void* psnr_output = readTrainedWeights("../model_params/pipeline/golden_output/caltech-gaussian.bin",
-    //                                       float_type,
-    //                                       test_batch_size, 1, H, W);
-    computePSNRViolation(result, golden_output, 30);
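-    // Flags outputs whose PSNR against the golden output falls below the 30 dB threshold.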
-
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, 80);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEMO.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEMO.cc
deleted file mode 100644
index 199a29a73af1a98fac31ae55f93c8bc8e7e2d6d4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEMO.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Emboss - Motion Blur - Outline ********** \n");
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 1000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-				   float_type,
-				   test_batch_size, 1, H, W);
-  
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEMO_calib.bin",
-  					   float_type,
-  					   test_batch_size, 1, H, W);
-
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-
-
-
-  //printTensorValues(input);
-  //printTensorValues(motionblur_filter);
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-      
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
-
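-    // Emboss is the only stage with a bias term; the bias is added to the conv output before clipping.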
-    void* emboss_out = tensorConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorRelu2(emboss_bias_out, 0, 255);
-
-    void* motionblur_out = tensorConvolution(emboss_bias_out_clip, motionblur_filter, 4, 4, 1, 1,
-                                       conv_mode, conv_precision);
-    void * motionblur_out_clip = tensorRelu2(motionblur_out, 0, 255);
-
-    void* outline_out = tensorConvolution(motionblur_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void* result = tensorRelu2(outline_out, 0, 255);
-
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GEMO_calib.bin");
-        
-    computePSNRViolation(result, golden_output, 30);
-
-   
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // send only the message; writing 80 bytes read past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEO.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEO.cc
deleted file mode 100644
index c2d6d1fb4dd3f8e0fe25db4f2628700a60aa44da..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEO.cc
+++ /dev/null
@@ -1,147 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Emboss - Outline ********** \n");
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 1000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEO_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
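-    // tensorRelu2(x, 0, 255) acts as a clamp to the 8-bit pixel range rather than a plain ReLU.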
-
-    void* emboss_out = tensorConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorRelu2(emboss_bias_out, 0, 255);
-
-    void* outline_out = tensorConvolution(emboss_bias_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * result = tensorRelu2(outline_out, 0, 255);
-
-    //void* result = gaussian_out;
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GEO_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-
-   
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // send only the message; writing 80 bytes read past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEOM.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEOM.cc
deleted file mode 100644
index 9c219f4daac2af708f02a64d97bf84ae36047316..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GEOM.cc
+++ /dev/null
@@ -1,151 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Emboss - Outline - Motion Blur ********** \n");
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 1000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W); 
-  
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEOM_calib.bin",
-					   float_type,
-					   test_batch_size, 1, H, W);
-
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
-
-    void* emboss_out = tensorConvolution(gaussian_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorAdd(emboss_out, emboss_bias);
-    void* emboss_bias_out_clip = tensorRelu2(emboss_bias_out, 0, 255);
-
-    void* outline_out = tensorConvolution(emboss_bias_out_clip, outline_filter, 1, 1, 1, 1,
-                                       conv_mode, conv_precision);
-    void * outline_out_clip = tensorRelu2(outline_out, 0, 255);
-
-    void* motionblur_out = tensorConvolution(outline_out_clip, motionblur_filter, 4, 4, 1, 1,
-                                       conv_mode, conv_precision);
-    void * result = tensorRelu2(motionblur_out, 0, 255);
-
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
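-    // The second argument (0) requests the tensor on the host, so the CPU-side checks below read up-to-date data.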
-    
-    //dumpOutput(result, "GEOM_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-
-    
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // send only the message; writing 80 bytes read past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSM.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSM.cc
deleted file mode 100644
index 721eb887ca0a6cd9e6cb5c992f7c0559716a1259..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSM.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur ********** \n");
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 1000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSM_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-
-  // NOTE: Filter descriptors do NOT have a batch-size dimension
-  // NOTE: The first two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel count matches the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
-
-    void* sharpen_out = tensorConvolution(gaussian_out_clip, sharpen_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    void * sharpen_out_clip = tensorRelu2(sharpen_out, 0, 255);
-
-    void* motionblur_out = tensorConvolution(sharpen_out_clip, motionblur_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * result = tensorRelu2(motionblur_out, 0, 255);
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GSM_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-
-    
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // send only the message; writing 80 bytes read past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSME.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSME.cc
deleted file mode 100644
index 19f0210e83939568ae2ca9b198ccfa95be70113e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/pipeline_GSME.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur - Emboss ********** \n");
-  // FIXIT: Extend this to the full dataset - currently 1000 of the 9145 images
-
-  //long int test_batch_size = 9145;
-  long int test_batch_size = 1000;
-  long int H = 240;
-  long int W = 300;
-
-  printf("Reading input\n");
-  void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-				   float_type,
-				   test_batch_size, 1, H, W);
-  
-  printf("Reading golden output\n");
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSME_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-
-  void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-					  float_type, 1, 1, 3, 3);    
-  void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-					  float_type, 1, 1, 9, 9);    
-  void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-					  float_type, 1, 1, 5, 5);  
-  void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-					  float_type, 1, 1, 1, 1);  
-  
-  clearTensorMap();
-  
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0'; // null-terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-  
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* gaussian_out = tensorConvolution(input, gaussian_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * gaussian_out_clip = tensorRelu2(gaussian_out, 0, 255);
-
-    void* sharpen_out = tensorConvolution(gaussian_out_clip, sharpen_filter, 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-    void * sharpen_out_clip = tensorRelu2(sharpen_out, 0, 255);
-
-    void* motionblur_out = tensorConvolution(sharpen_out_clip, motionblur_filter, 4, 4, 1, 1,
-				       conv_mode, conv_precision);
-    void * motionblur_out_clip = tensorRelu2(motionblur_out, 0, 255);
-
-    void* emboss_out = tensorConvolution(motionblur_out_clip, emboss_filter, 2, 2, 1, 1,
-                                       conv_mode, conv_precision);
-    void* emboss_bias_out = tensorAdd(emboss_out, emboss_bias);
-    void* result = tensorRelu2(emboss_bias_out, 0, 255);
-
-
-    //void* result = gaussian_out;
-
-    // NOTE-IMP: Always include this call before dumpOutput and computePSNRViolation
-    hpvm_request_tensor(result, 0);
-    
-    //dumpOutput(result, "GSME_calib.bin");
-    
-    computePSNRViolation(result, golden_output, 30);
-
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // write only the message, not 80 bytes past the literal
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
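The `Opentuner_run` blocks in the deleted test above (and in lenet_keras_profiling.cc below) implement a small named-pipe handshake with an external tuner: block on `/tmp/myfifo` for a go/stop message, run one configuration, then write a completion marker back. A minimal sketch of that protocol follows; the helper names `wait_for_tuner` and `signal_completion` are illustrative only, and the buffer handling is written to stay NUL-terminated.

```cpp
// Minimal sketch of the OpenTuner named-pipe handshake used in the deleted
// tests. Helper names are hypothetical; the pipe path and messages are the
// ones the sources use.
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fcntl.h>
#include <unistd.h>

static const char* kFifoPath = "/tmp/myfifo";

// Blocks until the tuner writes a message; aborts on "stop_run".
void wait_for_tuner() {
  int fd = open(kFifoPath, O_RDONLY);
  if (fcntl(fd, F_GETFD) == -1) {
    printf("Invalid descriptor \n");
    abort();
  }
  char str[100] = {0};            // zero-filled so strcmp sees a terminator
  read(fd, str, sizeof(str) - 1);
  if (strcmp(str, "stop_run") == 0)
    abort();                      // tuner asked us to stop
  close(fd);
}

// Tells the tuner that one configuration run finished.
void signal_completion() {
  int fd = open(kFifoPath, O_WRONLY);
  if (fcntl(fd, F_GETFD) == -1) {
    printf("Invalid descriptor \n");
    abort();
  }
  const char* msg = "completed***!\n";
  write(fd, msg, strlen(msg) + 1); // include the terminating NUL
  close(fd);
}
```

Writing `strlen(msg) + 1` bytes delivers exactly the message the reader expects while avoiding the out-of-bounds read behind the original fixed 80-byte write.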
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling.cc
deleted file mode 100644
index 50df874874592a94238e596189b6a477fb66f05f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling.cc
+++ /dev/null
@@ -1,166 +0,0 @@
-// Whole-network time/energy profiling (one window per batch)
-
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  startMemTracking();
-
-  int total_runs = 10;
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  for(int i = 0; i < total_runs; i++){
-    for(int j = 0; j < batch_count; j++){ // renamed to avoid shadowing the run counter
-      int start = j * batch_size;
-      int end = (j + 1) * batch_size;
-      void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
-
-      // FIRST Tensor Runtime CALL
-      profiler.resume_profiler();
-      void* conv1out = tensorConvolution(input, conv1_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv1out, conv1_bias); 
-      void* conv1_tanh = tensorTanh(conv1out);
-
-      // 2nd Layer
-      void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv2out, conv2_bias); 
-      void* conv2_tanh = tensorTanh(conv2out);
-      void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-      // 3rd Layer
-      void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv3out, conv3_bias); 
-      void* conv3_tanh = tensorTanh(conv3out);
-
-      // 4th Layer
-      void* conv4out = tensorConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv4out, conv4_bias); 
-      void* conv4_tanh = tensorTanh(conv4out);
-      void* pool4out = tensorPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-      // 5th Layer
-      void* conv5out = tensorConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv5out, conv5_bias); 
-      void* conv5_tanh = tensorTanh(conv5out);
-
-      // 6th Layer
-      void* conv6out = tensorConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      tensorAdd(conv6out, conv6_bias); 
-      void* conv6_tanh = tensorTanh(conv6out);
-      void* pool6out = tensorPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-
-      // final FC Layer
-      void* gemm1out = tensorGemmGPU(pool6out, fc1_weights); 
-      void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-      void* result = tensorSoftmax(gemm1biasout);
-
-      profiler.pause_profiler();
-      auto time_energy = profiler.get_time_energy();
-      total_time += time_energy.first;
-      total_energy += time_energy.second;
-
-      profiler.reset();
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, result); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-  
-  stopProfiling();
-  final_accuracy = (final_accuracy / batch_count) / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
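alexnet2_profiling.cc above wraps each batch's forward pass in a resume/pause window of the gpu_profiler and accumulates the returned (time, energy) pair. Stripped of the network itself, the measurement loop reduces to the sketch below, assuming the Profiler API used throughout these files; `run_batch()` is a placeholder for the tensor-runtime calls.

```cpp
// Sketch of the measurement-loop pattern shared by the *_profiling.cc files.
#include <iostream>
#include <utility>
#include "profiler.h"    // gpu_profiler header, as included in the sources

void run_batch();        // hypothetical: one forward pass over a batch

void profile_runs(int total_runs, int batch_count) {
  Profiler profiler;
  profiler.start_profiler();        // launch the sampling thread

  double total_time = 0.0, total_energy = 0.0;
  for (int run = 0; run < total_runs; run++) {
    for (int batch = 0; batch < batch_count; batch++) {
      profiler.resume_profiler();   // open the measurement window
      run_batch();
      profiler.pause_profiler();    // close the window

      std::pair<double, double> te = profiler.get_time_energy();
      total_time   += te.first;     // milliseconds
      total_energy += te.second;
      profiler.reset();             // drop readings before the next window
    }
  }
  profiler.stop_profiler();         // kill the sampling thread

  std::cout << "Average time: "   << total_time   / total_runs << '\n';
  std::cout << "Average energy: " << total_energy / total_runs << '\n';
}
```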
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling_tensors.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling_tensors.cc
deleted file mode 100644
index f95a7bda4fc581e4c40d4882304156f2420f22a5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet2_profiling_tensors.cc
+++ /dev/null
@@ -1,262 +0,0 @@
-// Per tensor operation
-
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-void add_data(std::unordered_map<std::string, std::pair<double, double> >& total_time_energies, Profiler& profiler, const std::string& op_name){
-    profiler.pause_profiler();
-    auto time_energy = profiler.get_time_energy();
-
-    auto itr = total_time_energies.find(op_name);
-    if (itr == total_time_energies.end()){
-        total_time_energies.insert(std::make_pair(op_name, time_energy));
-    } else {
-        itr->second.first += time_energy.first;
-        itr->second.second += time_energy.second;
-    }
-    profiler.reset();
-}
-
-/* NOTE: Reference Architecture to use for profiling */
-void testCifarNet(){
-
-  printf("********* Alexnet2 CIFAR-10 DNN ********** \n");
- 
-  std::string dir_prefix = std::string("../model_params/alexnet2_cifar10/"); 
-  std::string input_path =  dir_prefix + std::string("norm_cifar_input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("test_labels.bin"); 
-
-  void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					  float_type, 32, 3, 3, 3);  
-  void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					  float_type, 32, 32, 3, 3);  
-  void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					float_type, 1, 32, 1, 1);
-  void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					  float_type, 64, 32, 3, 3);  
-  void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					  float_type, 64, 64, 3, 3);  
-  void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					float_type, 1, 64, 1, 1);
-  void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					  float_type, 128, 64, 3, 3);  
-  void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					float_type, 1, 128, 1, 1);
-  void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					  float_type, 128, 128, 3, 3);  
-  void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					float_type, 1, 128, 1, 1);
-  
-  void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					 float_type, 1, 1, 2048, 10);  
-  void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-				      float_type, 1, 10, 1, 1);  
- 
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  std::ofstream online_profiler_output;
-  online_profiler_output.open("online_output.txt");
-
-  startMemTracking();
-
-  // NOTE: input changed to the standardized dataset
-  int total_runs = 50; // NOTE: reduced from 100 for now
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  // Get the total time and energy per tensor per run 
-  std::unordered_map<std::string, std::pair<double, double> > total_time_energies;
-
-  for(int i = 0; i < total_runs; i++){
-    for(int j = 0; j < batch_count; j++){ // renamed to avoid shadowing the run counter
-      int start = j * batch_size;
-      int end = (j + 1) * batch_size;
-      void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
-
-      // FIRST Tensor Runtime CALL
-      profiler.resume_profiler();
-      void* conv1out = tensorConvolution(input, conv1_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv1");
-       
-      profiler.resume_profiler();
-      tensorAdd(conv1out, conv1_bias); 
-      add_data(total_time_energies, profiler, "Add1");
-
-      profiler.resume_profiler();
-      void* conv1_tanh = tensorTanh(conv1out);
-      add_data(total_time_energies, profiler, "Tanh1");
-
-      // 2nd Layer
-      profiler.resume_profiler();
-      void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv2");
-
-      profiler.resume_profiler();
-      tensorAdd(conv2out, conv2_bias); 
-      add_data(total_time_energies, profiler, "Add2");
-
-      profiler.resume_profiler();
-      void* conv2_tanh = tensorTanh(conv2out);
-      add_data(total_time_energies, profiler, "Tanh2");
-
-      profiler.resume_profiler();
-      void* pool2out = tensorPooling(conv2_tanh, 0, 2, 2, 0, 0, 2, 2);
-      add_data(total_time_energies, profiler, "Pool1");
-
-      // 3rd Layer
-      profiler.resume_profiler();
-      void* conv3out = tensorConvolution(pool2out, conv3_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv3");
-
-      profiler.resume_profiler();
-      tensorAdd(conv3out, conv3_bias); 
-      add_data(total_time_energies, profiler, "Add3");
-
-      profiler.resume_profiler();
-      void* conv3_tanh = tensorTanh(conv3out);
-      add_data(total_time_energies, profiler, "Tanh3");
-
-      // 4th Layer
-      profiler.resume_profiler();
-      void* conv4out = tensorConvolution(conv3_tanh, conv4_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv4");
-
-      profiler.resume_profiler();
-      tensorAdd(conv4out, conv4_bias); 
-      add_data(total_time_energies, profiler, "Add4");
-
-      profiler.resume_profiler();
-      void* conv4_tanh = tensorTanh(conv4out);
-      add_data(total_time_energies, profiler, "Tanh4");
-
-      profiler.resume_profiler();
-      void* pool4out = tensorPooling(conv4_tanh, 0, 2, 2, 0, 0, 2, 2);
-      add_data(total_time_energies, profiler, "Pool2");
-
-      // 5th Layer
-      profiler.resume_profiler();
-      void* conv5out = tensorConvolution(pool4out, conv5_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv5");
-
-      profiler.resume_profiler();
-      tensorAdd(conv5out, conv5_bias); 
-      add_data(total_time_energies, profiler, "Add5");
-
-      profiler.resume_profiler();
-      void* conv5_tanh = tensorTanh(conv5out);
-      add_data(total_time_energies, profiler, "Tanh5");
-
-      // 6th Layer
-      profiler.resume_profiler();
-      void* conv6out = tensorConvolution(conv5_tanh, conv6_filter, 1, 1, 1, 1,
-					 conv_mode, conv_precision);
-      add_data(total_time_energies, profiler, "Conv6");
-
-      profiler.resume_profiler();
-      tensorAdd(conv6out, conv6_bias); 
-      add_data(total_time_energies, profiler, "Add6");
-
-      profiler.resume_profiler();
-      void* conv6_tanh = tensorTanh(conv6out);
-      add_data(total_time_energies, profiler, "Tanh6");
-
-      profiler.resume_profiler();
-      void* pool6out = tensorPooling(conv6_tanh, 0, 2, 2, 0, 0, 2, 2);
-      add_data(total_time_energies, profiler, "Pool3");
-
-      // final FC Layer
-      profiler.resume_profiler();
-      void* gemm1out = tensorGemmGPU(pool6out, fc1_weights); 
-      add_data(total_time_energies, profiler, "Mul1"); // ASSUMING that this is mul1
-
-      std::cout<<"-----------------------------------ADD 7--------------------------------\n";
-      profiler.resume_profiler();
-      void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-      add_data(total_time_energies, profiler, "Add7");
-      std::cout<<"-----------------------------------ADD 7 ENDS --------------------------------\n";
-
-      profiler.resume_profiler();
-      void* result = tensorSoftmax(gemm1biasout);
-      add_data(total_time_energies, profiler, "Softmax1");
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, result); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-  }
-  profiler.stop_profiler();
-  
-  stopProfiling();
-  //online_profiler_output << "Total time: " << total_time << ", " << total_energy << "\n";
-  // Now compute the averages across batches
-  std::ofstream ofs;
-  std::string arr[] = {"Add1", "Add2", "Add3", "Add4", "Add5", "Add6", "Add7",
-                       "Conv1", "Conv2", "Conv3", "Conv4", "Conv5", "Conv6",
-                       "Mul1",
-                       "Pool1", "Pool2", "Pool3",
-                       "Softmax1",
-                       "Tanh1", "Tanh2", "Tanh3", "Tanh4", "Tanh5", "Tanh6"};
-  ofs.open("online_profiler_tensor_data.txt");
-  std::vector<std::string> ordered_keys(std::begin(arr), std::end(arr));
-  for (const std::string& key : ordered_keys){
-    const auto& data_pair = total_time_energies[key];
-    ofs << key << ": " << data_pair.first / total_runs << "\t" << data_pair.second / total_runs << '\n';
-    std::cout<< key << ": " << data_pair.first / total_runs << "\t" << data_pair.second / total_runs << '\n';
-  }
-   
-  /*
-  ofs.open("online_profiler_tensor_data.txt");
-  for (const auto& tensor_data : total_time_energies){
-    ofs << tensor_data.first << ": " << tensor_data.second.first / total_runs << "\t" << tensor_data.second.second / total_runs << '\n';
-  }*/
-  ofs.close();
-  final_accuracy = (final_accuracy / batch_count) / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-  online_profiler_output.close();
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testCifarNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
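The `add_data` helper above turns the same windowing into per-operator totals: every tensor call gets its own resume/pause window, and its (time, energy) pair is summed under the operator's name. A condensed sketch of that aggregation follows; the `profile_op` wrapper is hypothetical, standing in for the repeated resume/call/add_data triples in the source.

```cpp
// Sketch of the per-tensor-op aggregation in alexnet2_profiling_tensors.cc.
#include <fstream>
#include <string>
#include <unordered_map>
#include <utility>
#include "profiler.h"    // gpu_profiler header, as in the sources

using TimeEnergy = std::pair<double, double>;

// Hypothetical wrapper: measure one op and fold it into the totals.
template <typename F>
void profile_op(std::unordered_map<std::string, TimeEnergy>& totals,
                Profiler& profiler, const std::string& op_name, F op) {
  profiler.resume_profiler();
  op();                                  // e.g. a tensorConvolution call
  profiler.pause_profiler();
  TimeEnergy te = profiler.get_time_energy();
  totals[op_name].first  += te.first;    // operator[] default-inserts {0, 0}
  totals[op_name].second += te.second;
  profiler.reset();
}

// Dump per-op averages. The original iterates an explicit key array instead
// of the map itself, because unordered_map iteration order is unspecified.
void dump_averages(const std::unordered_map<std::string, TimeEnergy>& totals,
                   int total_runs, const char* path) {
  std::ofstream ofs(path);
  for (const auto& kv : totals)
    ofs << kv.first << ": " << kv.second.first / total_runs
        << "\t" << kv.second.second / total_runs << '\n';
}
```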
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet_cifar10_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet_cifar10_profiling.cc
deleted file mode 100644
index 94cef7fba14e0b9d1d9ec72b508ccd11cb560a87..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/alexnet_cifar10_profiling.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/alexnet_cifar10_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  for(int i = 0; i < total_runs; i++){
-      for(int j = 0; j < batch_count; j++){ // renamed to avoid shadowing the run counter
-
-        int start = j * batch_size;
-        int end = (j + 1) * batch_size;
-        void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
-
-        profiler.resume_profiler();
-        void* var_0 = tensorConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-        void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-        void* var_2 = tensorTanh(var_1); 
-        void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-        void* var_5 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-        void* var_6 = tensorAdd(var_5, conv2d_2_b); 
-        void* var_7 = tensorTanh(var_6); 
-        void* var_8 = tensorPooling(var_7,0,2,2,0,0,2,2); 
-        void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-        void* var_12 = tensorTanh(var_11); 
-        void* var_13 = tensorConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_14 = tensorAdd(var_13, conv2d_4_b); 
-        void* var_15 = tensorTanh(var_14); 
-        void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-        void* var_18 = tensorTanh(var_17); 
-        void* var_19 = tensorPooling(var_18,0,2,2,0,0,2,2); 
-        void* var_22 = tensorGemmGPU(var_19, dense_1_w); 
-        void* var_23 = tensorAdd(var_22, dense_1_b); 
-        void* var_24 = tensorSoftmax(var_23); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels,batch_size,var_24); 
-        final_accuracy += accuracy;
-        
-        freeBatchMemory();
-      } 
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
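Each profiling driver ends the same way: per-batch accuracies from computeAccuracy2 are summed across every (run, batch) pair and then divided by batch_count and total_runs. The division order is immaterial; it is simply the mean over total_runs * batch_count measurements, as the small sketch below (with hypothetical names) spells out.

```cpp
// Sketch of the final accuracy averaging used by the profiling drivers:
// dividing the sum by batch_count and then total_runs equals dividing by
// the total number of (run, batch) measurements.
#include <cassert>
#include <vector>

float average_accuracy(const std::vector<float>& per_batch_accuracy,
                       int total_runs, int batch_count) {
  assert((int)per_batch_accuracy.size() == total_runs * batch_count);
  float sum = 0.0f;
  for (float a : per_batch_accuracy)
    sum += a;
  return sum / batch_count / total_runs; // same as sum / (runs * batches)
}
```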
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/lenet_keras_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/lenet_keras_profiling.cc
deleted file mode 100644
index f55d883c2c11f1f0721b64d4ab00de373f685a3e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/lenet_keras_profiling.cc
+++ /dev/null
@@ -1,186 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  int total_runs = 10;
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to a full batch of images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have a batch dimension
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channels)
-  // IMP: The output channel counts match the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  
-  clearTensorMap();
- 
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-  float final_accuracy = 0.0;
-
-  for(int i = 0; i < total_runs; i++){
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100] = {0}; // zero-fill so the buffer is NUL-terminated for strcmp
-      read(fd, str, sizeof(str) - 1);
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling
-    startProfiling();
-    profiler.resume_profiler();
-
-    int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-    int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-    // NOTE: 'SAME' convolution
-    void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-
-    // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-    tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-
-    void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv1_tanh = tensorTanh(pool1out);
-
-    // NOTE: input channels have to match between tensor op inputs and outputs 
-    void* conv2out = tensorConvolution(conv1_tanh, conv2_filter, 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-    tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-    void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-
-    void* conv2_tanh = tensorTanh(pool2out);
-
-    void* gemm1out = tensorGemmGPU(conv2_tanh, fc1_weights);  
-
-    void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-
-    void* tanh1out = tensorTanh(gemm1biasout);
-  
-    void* gemm2out = tensorGemmGPU(tanh1out, fc2_weights);  
-  
-    void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-
-    void* tanh2out = tensorTanh(gemm2_biasout);
-  
-    void* result = tensorSoftmax(tanh2out);
-
-    profiler.pause_profiler();
-    auto time_energy = profiler.get_time_energy();
-    total_time += time_energy.first;
-    total_energy += time_energy.second;
-
-    profiler.reset();
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    final_accuracy += computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // write only the message, not 80 bytes past the literal
-      close(fd_out);
-    }
-    
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-}
-
-
-int main(int argc, char* argv[]){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
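The bulk of the remaining deletions (e.g. the mobilenet file below) is parameter loading through readTrainedWeights with four explicit dimensions. The convention, inferred from the calls themselves rather than any documented contract, appears to be NCHW-style; the sketch below restates it under that assumption, using readTrainedWeights and float_type from the runtime's own headers as in the deleted sources.

```cpp
// Dimension conventions inferred from the readTrainedWeights calls above
// (illustrative only; the runtime's headers are authoritative):
//   conv filters:     (out_channels, in_channels, kernel_h, kernel_w)
//   conv/dense bias:  (1, channels, 1, 1)
//   dense weights:    (1, 1, in_features, out_features)
#include "../../include/utils.h" // declares readTrainedWeights / float_type

void load_example_params() {
  void* conv_w = readTrainedWeights("conv1.bin", float_type, 32, 1, 5, 5);
  void* conv_b = readTrainedWeights("conv1_bias.bin", float_type, 1, 32, 1, 1);
  void* fc_w   = readTrainedWeights("fc1.bin", float_type, 1, 1, 7*7*64, 1024);
  (void)conv_w; (void)conv_b; (void)fc_w; // silence unused warnings in the sketch
}
```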
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_cifar10_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_cifar10_profiling.cc
deleted file mode 100644
index e84ca25f6fc3b44b02ce5b45f4517ba6ad6bc3be..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_cifar10_profiling.cc
+++ /dev/null
@@ -1,438 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_quant/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-  startProfiling();
-
-  int test_input_size = 5000; 
-  int batch_size = 1000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  for(int run_num = 0; run_num < total_runs; run_num++){
-      for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-        void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-        void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-        void* var_2 = tensorRelu(var_1); 
-        void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-        void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-        void* var_6 = tensorRelu(var_5); 
-        void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-        void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-        void* var_9 = tensorRelu(var_8); 
-        void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-        void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-        void* var_13 = tensorRelu(var_12); 
-        void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-        void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-        void* var_16 = tensorRelu(var_15); 
-        void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-        void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-        void* var_20 = tensorRelu(var_19); 
-        void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-        void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-        void* var_23 = tensorRelu(var_22); 
-        void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-        void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-        void* var_28 = tensorRelu(var_27); 
-        void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-        void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-        void* var_31 = tensorRelu(var_30); 
-        void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-        void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-        void* var_35 = tensorRelu(var_34); 
-        void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-        void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-        void* var_38 = tensorRelu(var_37); 
-        void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-        void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-        void* var_43 = tensorRelu(var_42); 
-        void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-        void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-        void* var_46 = tensorRelu(var_45); 
-        void* var_48 = tensorConvolution(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-        void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-        void* var_50 = tensorRelu(var_49); 
-        void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-        void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-        void* var_53 = tensorRelu(var_52); 
-        void* var_55 = tensorConvolution(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-        void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-        void* var_57 = tensorRelu(var_56); 
-        void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-        void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-        void* var_60 = tensorRelu(var_59); 
-        void* var_63 = tensorConvolution(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-        void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-        void* var_65 = tensorRelu(var_64); 
-        void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-        void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-        void* var_68 = tensorRelu(var_67); 
-        void* var_70 = tensorConvolution(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-        void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-        void* var_72 = tensorRelu(var_71); 
-        void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-        void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-        void* var_75 = tensorRelu(var_74); 
-        void* var_77 = tensorConvolution(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-        void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-        void* var_79 = tensorRelu(var_78); 
-        void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-        void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-        void* var_82 = tensorRelu(var_81); 
-        void* var_85 = tensorConvolution(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-        void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-        void* var_87 = tensorRelu(var_86); 
-        void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-        void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-        void* var_90 = tensorRelu(var_89); 
-        void* var_92 = tensorConvolution(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-        void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-        void* var_94 = tensorRelu(var_93); 
-        void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-        void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-        void* var_97 = tensorRelu(var_96); 
-        void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-        void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-        void* var_102 = tensorAdd(var_101, dense_1_b); 
-        void* var_103 = tensorSoftmax(var_102); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-      }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
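Note: this deleted benchmark layers two mechanisms, the tensor runtime's own startProfiling()/stopProfiling() pair and the gpu_profiler's Profiler object, whose measurement window is opened and closed around each forward pass so that the readInputBatch()/readLabelsBatch() I/O stays outside it. A minimal sketch of that pattern follows, assuming the profiler.h interface shown at the top of this diff; runOneBatch() is a hypothetical stand-in for the inlined network body, not a function from the deleted sources:

    #include <iostream>
    #include "profiler.h"              // gpu_profiler public interface; real include path varies

    void runOneBatch(int batch);       // hypothetical: forward pass for one input batch

    void profileForwardPasses(int total_runs, int batch_count) {
      Profiler profiler;
      profiler.start_profiler();       // launch the power-sampling thread once

      double total_time = 0.0, total_energy = 0.0;
      for (int run = 0; run < total_runs; run++) {
        for (int b = 0; b < batch_count; b++) {
          profiler.resume_profiler();            // open the measurement window
          runOneBatch(b);                        // network work only; batch I/O stays outside
          profiler.pause_profiler();             // close the window

          auto te = profiler.get_time_energy();  // <delta time in ms, GPU+DDR energy>
          total_time += te.first;
          total_energy += te.second;
          profiler.reset();                      // clear readings before the next window
        }
      }
      profiler.stop_profiler();                  // joins the profiler thread; required before exit

      std::cout << "Average time (ms): " << total_time / total_runs << '\n';
      std::cout << "Average energy: " << total_energy / total_runs << '\n';
    }

Because the window excludes batch loading and accuracy computation, the accumulated totals are attributable to the network itself; dividing by total_runs then yields per-run averages, exactly as the deleted code does.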
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_depthwise_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_depthwise_profiling.cc
deleted file mode 100644
index 3dcce8ada9c74a439440594eb7df8a33c169cf6c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_depthwise_profiling.cc
+++ /dev/null
@@ -1,415 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  std::string dir_prefix = std::string("../model_params/mobilenet/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 1000;  
-  int batch_count = test_input_size / batch_size; 
-
-  int total_runs = 10;
-  float final_accuracy = 0.0; 
-
-  for (int run_num = 0; run_num < total_runs; run_num++){
-      for(int i = 0; i < batch_count; i++){ 
-
-        int start = i * batch_size; 
-        int end = (i + 1) * batch_size; 
-
-        void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-        void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-        void* var_2 = tensorRelu(var_1); 
-        void* var_4 = tensorConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-        void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-        void* var_6 = tensorRelu(var_5); 
-        void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-        void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-        void* var_9 = tensorRelu(var_8); 
-        void* var_11 = tensorConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-        void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-        void* var_13 = tensorRelu(var_12); 
-        void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-        void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-        void* var_16 = tensorRelu(var_15); 
-        void* var_18 = tensorConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-        void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-        void* var_20 = tensorRelu(var_19); 
-        void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-        void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-        void* var_23 = tensorRelu(var_22); 
-        void* var_26 = tensorConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-        void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-        void* var_28 = tensorRelu(var_27); 
-        void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-        void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-        void* var_31 = tensorRelu(var_30); 
-        void* var_33 = tensorConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-        void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-        void* var_35 = tensorRelu(var_34); 
-        void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-        void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-        void* var_38 = tensorRelu(var_37); 
-        void* var_41 = tensorConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-        void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-        void* var_43 = tensorRelu(var_42); 
-        void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-        void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-        void* var_46 = tensorRelu(var_45); 
-        void* var_48 = tensorConvCutlass(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-        void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-        void* var_50 = tensorRelu(var_49); 
-        void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-        void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-        void* var_53 = tensorRelu(var_52); 
-        void* var_55 = tensorConvCutlass(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-        void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-        void* var_57 = tensorRelu(var_56); 
-        void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-        void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-        void* var_60 = tensorRelu(var_59); 
-        void* var_63 = tensorConvCutlass(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-        void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-        void* var_65 = tensorRelu(var_64); 
-        void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-        void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-        void* var_68 = tensorRelu(var_67); 
-        void* var_70 = tensorConvCutlass(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-        void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-        void* var_72 = tensorRelu(var_71); 
-        void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-        void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-        void* var_75 = tensorRelu(var_74); 
-        void* var_77 = tensorConvCutlass(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-        void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-        void* var_79 = tensorRelu(var_78); 
-        void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-        void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-        void* var_82 = tensorRelu(var_81); 
-        void* var_85 = tensorConvCutlass(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-        void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-        void* var_87 = tensorRelu(var_86); 
-        void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-        void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-        void* var_90 = tensorRelu(var_89); 
-        void* var_92 = tensorConvCutlass(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-        void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-        void* var_94 = tensorRelu(var_93); 
-        void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-        void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-        void* var_97 = tensorRelu(var_96); 
-        void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-        void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-        void* var_102 = tensorAdd(var_101, dense_1_b); 
-        void* var_103 = tensorSoftmax(var_102); 
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-        final_accuracy += accuracy; 
-        freeBatchMemory(); 
-      }
-  }
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
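Note: this second deleted benchmark differs from the previous one in a single respect: every 3x3 depthwise convolution is routed through tensorConvCutlass() rather than tensorConvolution(), while the 1x1 pointwise convolutions keep the generic path (and no Profiler window is opened here). Each of the 13 repeated layer groups is a standard depthwise-separable block. A sketch of one such block follows; the argument order (pad_h, pad_w, stride_h, stride_w, a constant 1 whose meaning the deleted sources do not show, group count) is inferred from the call sites, and depthwiseSeparableBlock() is a hypothetical helper, since the generated code inlines everything:

    #include "../../../tensor_runtime/include/tensor_runtime.h"  // path as in the deleted sources

    // One depthwise-separable unit, as repeated 13 times in the generated network.
    // The trailing "1, channels" / "1, 1" arguments are copied from the call sites above.
    void* depthwiseSeparableBlock(void* in, int stride, int channels,
                                  void* dw_w,
                                  void* bn1_gamma, void* bn1_beta, void* bn1_mean, void* bn1_var,
                                  void* pw_w,
                                  void* bn2_gamma, void* bn2_beta, void* bn2_mean, void* bn2_var) {
      // 3x3 depthwise convolution: one filter per input channel (groups == channels)
      void* t = tensorConvCutlass(in, dw_w, 1, 1, stride, stride, 1, channels);
      t = tensorBatchNorm(t, bn1_gamma, bn1_beta, bn1_mean, bn1_var, 0.001);
      t = tensorRelu(t);
      // 1x1 pointwise convolution: no padding, unit stride, groups == 1 mixes channels
      t = tensorConvolution(t, pw_w, 0, 0, 1, 1, 1, 1);
      t = tensorBatchNorm(t, bn2_gamma, bn2_beta, bn2_mean, bn2_var, 0.001);
      return tensorRelu(t);
    }

With groups equal to the input channel count, the 3x3 stage applies one filter per channel; the pointwise stage (groups = 1) then mixes channels, which is what makes the pair a factorized substitute for a full 3x3 convolution.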
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc
deleted file mode 100644
index 5af17774b41d4d265e110dd988e3458442312226..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_depthwise_profiling.cc
+++ /dev/null
@@ -1,247 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 10;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
-
-  //std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/");
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/");
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 500; 
-  int batch_count = test_input_size / batch_size; 
-
-
-  float final_accuracy = 0.0;
-
-  for(int j = 0; j < total_runs; j++){    
-    for(int i = 0; i < batch_count; i++){ 
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_4 = tensorConvCutlass(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_6 = tensorRelu(var_5); 
-      void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-      void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_9 = tensorRelu(var_8); 
-      void* var_11 = tensorConvCutlass(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_13 = tensorRelu(var_12); 
-      void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-      void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_16 = tensorRelu(var_15); 
-      void* var_18 = tensorConvCutlass(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_26 = tensorConvCutlass(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_28 = tensorRelu(var_27); 
-      void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-      void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_31 = tensorRelu(var_30); 
-      void* var_33 = tensorConvCutlass(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_41 = tensorConvCutlass(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_43 = tensorRelu(var_42); 
-      void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-      void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_46 = tensorRelu(var_45); 
-      void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-      void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-      void* var_50 = tensorAdd(var_49, dense_1_b); 
-      void* var_51 = tensorSoftmax(var_50); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    // NOTE: final_accuracy accumulates across runs, so this per-run dump is a
-    // cumulative figure rather than the accuracy of this run alone
-    dumpFinalAccuracy(final_accuracy / batch_count); 
-  }
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy);
-
-  //dumpExecutionAccuracies();
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
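
The file above is the depthwise variant of the shallow MobileNet benchmark: each block runs a grouped 3x3 convolution through tensorConvCutlass with the group count equal to the channel count, then a 1x1 pointwise tensorConvolution, each followed by batch normalization and ReLU. A minimal sketch of one such block, assuming the same runtime calls; in, dw_w, pw_w and the batch-norm parameter names are placeholders:

    // One depthwise-separable block as used above (placeholder tensor names).
    // Argument order, inferred from the calls above:
    //   input, filter, pad_h, pad_w, stride_h, stride_w, conv_mode, conv_groups.
    void* dw   = tensorConvCutlass(in, dw_w, 1, 1, 1, 1, 1, 32);   // depthwise 3x3: groups == channels
    void* bn1  = tensorBatchNorm(dw, gamma1, beta1, mean1, var1, 0.001);
    void* act1 = tensorRelu(bn1);
    void* pw   = tensorConvolution(act1, pw_w, 0, 0, 1, 1, 1, 1);  // pointwise 1x1, no padding
    void* bn2  = tensorBatchNorm(pw, gamma2, beta2, mean2, var2, 0.001);
    void* act2 = tensorRelu(bn2);
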
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_profiling.cc
deleted file mode 100644
index c60f15e3cb71d9fa81b444be8348803e1d7891d6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/mobilenet_shallow_profiling.cc
+++ /dev/null
@@ -1,227 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/mobilenet_shallow/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  for(int run = 0; run < total_runs; run++){ // repeat the whole test set total_runs times
-	  for(int i = 0; i < batch_count; i++){ 
-
-		int start = i * batch_size; 
-		int end = (i + 1) * batch_size; 
-
-		void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-
-		void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-		void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-		void* var_2 = tensorRelu(var_1); 
-		void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-		void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-		void* var_6 = tensorRelu(var_5); 
-		void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-		void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-		void* var_9 = tensorRelu(var_8); 
-		void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-		void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-		void* var_13 = tensorRelu(var_12); 
-		void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-		void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-		void* var_16 = tensorRelu(var_15); 
-		void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 2, 2, 1, 64); 
-		void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-		void* var_20 = tensorRelu(var_19); 
-		void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-		void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-		void* var_23 = tensorRelu(var_22); 
-		void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-		void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-		void* var_28 = tensorRelu(var_27); 
-		void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-		void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-		void* var_31 = tensorRelu(var_30); 
-		void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-		void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-		void* var_35 = tensorRelu(var_34); 
-		void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-		void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-		void* var_38 = tensorRelu(var_37); 
-		void* var_40 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-		void* var_42 = tensorGemmGPU(var_40, dense_1_w); 
-		void* var_43 = tensorAdd(var_42, dense_1_b); 
-		void* var_44 = tensorSoftmax(var_43); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-		uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-		float accuracy = computeAccuracy2(labels, batch_size, var_44); 
-		final_accuracy += accuracy; 
-		freeBatchMemory(); 
-	  } 
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
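
Every *_profiling.cc benchmark in this directory repeats the measurement pattern used above: the power profiler is resumed just before the forward pass, paused right after softmax, and the accumulated time/energy pair is read and reset before the unmeasured label I/O and accuracy computation. A stripped-down sketch of that structure, assuming the Profiler object and runtime helpers used above; run_network is a placeholder standing in for the inlined forward pass:

    // Sketch of the per-batch measurement window used by these benchmarks.
    int test_input_size = 5000, batch_size = 1000, total_runs = 10;
    int batch_count = test_input_size / batch_size;

    Profiler profiler;
    profiler.start_profiler();                      // launch the sampling thread once

    double total_time = 0.0, total_energy = 0.0;
    for (int run = 0; run < total_runs; run++) {
      for (int i = 0; i < batch_count; i++) {
        void* input = readInputBatch(input_path.c_str(), 0,
                                     i * batch_size, (i + 1) * batch_size, 3, 32, 32);

        profiler.resume_profiler();                 // open the measurement window
        void* output = run_network(input);          // placeholder for the inlined forward pass
        profiler.pause_profiler();                  // close the window

        auto time_energy = profiler.get_time_energy(); // <time, energy> for the window
        total_time   += time_energy.first;
        total_energy += time_energy.second;
        profiler.reset();                           // clear readings before the next batch

        freeBatchMemory();                          // accuracy bookkeeping omitted here
      }
    }
    profiler.stop_profiler();                       // called once, after all runs

    std::cout << "Average time: "   << total_time   / total_runs << '\n';
    std::cout << "Average energy: " << total_energy / total_runs << '\n';
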
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/resnet18_cifar10_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/resnet18_cifar10_profiling.cc
deleted file mode 100644
index 1b0664200170235e2d0dac5682108de97b094776..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/resnet18_cifar10_profiling.cc
+++ /dev/null
@@ -1,245 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  int total_runs = 10;
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-
-  for(int run = 0; run < total_runs; run++){ // repeat the whole test set total_runs times
-      for(int i = 0; i < batch_count; i++){
-        int start = i * batch_size;
-        int end = (i + 1) * batch_size;
-        
-        void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-
-        profiler.resume_profiler();
-        
-        void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-        void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-        void* var_4 = tensorRelu(var_3); 
-        void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-        void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-        void* var_8 = tensorRelu(var_7); 
-        void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-        void* var_12 = tensorAdd(var_4, var_11); 
-        void* var_13 = tensorRelu(var_12); 
-        void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-        void* var_17 = tensorRelu(var_16); 
-        void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-        void* var_21 = tensorAdd(var_13, var_20); 
-        void* var_22 = tensorRelu(var_21); 
-        void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-        void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-        void* var_26 = tensorRelu(var_25); 
-        void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-        void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-        void* var_30 = tensorAdd(var_22, var_29); 
-        void* var_31 = tensorRelu(var_30); 
-        void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-        void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-        void* var_35 = tensorRelu(var_34); 
-        void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-        void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-        void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-        void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-        void* var_42 = tensorAdd(var_41, var_38); 
-        void* var_43 = tensorRelu(var_42); 
-        void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-        void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-        void* var_47 = tensorRelu(var_46); 
-        void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-        void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-        void* var_51 = tensorAdd(var_43, var_50); 
-        void* var_52 = tensorRelu(var_51); 
-        void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-        void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-        void* var_56 = tensorRelu(var_55); 
-        void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-        void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-        void* var_60 = tensorAdd(var_52, var_59); 
-        void* var_61 = tensorRelu(var_60); 
-        void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-        void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-        void* var_65 = tensorRelu(var_64); 
-        void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-        void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-        void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-        void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-        void* var_72 = tensorAdd(var_71, var_68); 
-        void* var_73 = tensorRelu(var_72); 
-        void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-        void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-        void* var_77 = tensorRelu(var_76); 
-        void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-        void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-        void* var_81 = tensorAdd(var_73, var_80); 
-        void* var_82 = tensorRelu(var_81); 
-        void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-        void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-        void* var_86 = tensorRelu(var_85); 
-        void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-        void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-        void* var_90 = tensorAdd(var_82, var_89); 
-        void* var_91 = tensorRelu(var_90); 
-        void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-        void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-        void* var_95 = tensorAdd(var_94, dense_1_b); 
-        void* var_96 = tensorSoftmax(var_95); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-        final_accuracy += accuracy;
-        
-        freeBatchMemory();
-    }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
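
Beyond the power Profiler, the ResNet-18 benchmark above also brackets the whole experiment with the runtime's startProfiling()/stopProfiling() time profiling. Its network body is built from the standard residual pair: two conv/bias/ReLU stages whose result is added back to the block input, or to a strided 1x1 projection of it when the spatial size halves. A minimal sketch of the identity-shortcut case, with placeholder names:

    // One identity-shortcut residual block as used above (placeholder names).
    void* c1  = tensorConvolution(x, w1, 1, 1, 1, 1, 1, 0); // 3x3 conv, pad 1, stride 1
    void* b1  = tensorAdd(c1, bias1);
    void* a1  = tensorRelu(b1);
    void* c2  = tensorConvolution(a1, w2, 1, 1, 1, 1, 1, 0);
    void* b2  = tensorAdd(c2, bias2);
    void* sum = tensorAdd(x, b2);                           // skip connection: add the block input
    void* y   = tensorRelu(sum);
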
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar100_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar100_profiling.cc
deleted file mode 100644
index 70246dac4b3d43550f49a0a653d1c13396f3a84a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar100_profiling.cc
+++ /dev/null
@@ -1,184 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 500; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  int total_runs = 10;
-  Profiler profiler;
-  profiler.start_profiler();
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
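-  // Each outer iteration re-runs the full 5000-image test set; measured time
-  // and energy accumulate across runs and are averaged per run at the end.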
-  for (int run = 0; run < total_runs; run++){
-	  for(int i = 0; i < batch_count; i++){ 
-
-		int start = i * batch_size; 
-		int end = (i + 1) * batch_size; 
-
-		void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-
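-		// Only the forward pass below falls inside the profiling window;
-		// input loading and accuracy computation are excluded from time/energy.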
-		void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-		void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-		void* var_2 = tensorRelu(var_1); 
-		void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-		void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-		void* var_6 = tensorRelu(var_5); 
-		void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-		void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-		void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-		void* var_10 = tensorRelu(var_9); 
-		void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-		void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-		void* var_14 = tensorRelu(var_13); 
-		void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-		void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-		void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-		void* var_18 = tensorRelu(var_17); 
-		void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-		void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-		void* var_22 = tensorRelu(var_21); 
-		void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-		void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-		void* var_26 = tensorRelu(var_25); 
-		void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-		void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-		void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-		void* var_30 = tensorRelu(var_29); 
-		void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-		void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-		void* var_34 = tensorRelu(var_33); 
-		void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-		void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-		void* var_38 = tensorRelu(var_37); 
-		void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-		void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-		void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-		void* var_42 = tensorRelu(var_41); 
-		void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-		void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-		void* var_46 = tensorRelu(var_45); 
-		void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-		void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-		void* var_50 = tensorRelu(var_49); 
-		void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-		void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-		void* var_55 = tensorAdd(var_54, dense_1_b); 
-		void* var_56 = tensorRelu(var_55); 
-		void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-		void* var_59 = tensorAdd(var_58, dense_2_b); 
-		void* var_60 = tensorSoftmax(var_59); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-		uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-		float accuracy = computeAccuracy2(labels, batch_size, var_60, 100); 
-		final_accuracy += accuracy; 
-		freeBatchMemory(); 
-	 
-	  }
-  }
-
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / batch_count / total_runs; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar10_profiling.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar10_profiling.cc
deleted file mode 100644
index 7ed583884a3fa2fca745bde4d27f8ca92cfcda02..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/profiling/vgg16_cifar10_profiling.cc
+++ /dev/null
@@ -1,185 +0,0 @@
-#include "/home/nvidia/Gitlab/hpvm/llvm/projects/gpu_profiler/include/profiler.h"
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
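-  // readTrainedWeights dimensions appear to be (out_channels, in_channels,
-  // kernel_h, kernel_w) for conv filters and (1, 1, inputs, outputs) for
-  // dense weights; the second argument appears to select the element type (0 = float).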
-
-
-  startMemTracking();
-
-  int test_input_size = 5000;
-  int batch_size = 500;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  Profiler profiler;
-  profiler.start_profiler();
-
-  double total_time = 0.0;
-  double total_energy = 0.0;
-
-  int total_runs = 10; 
-  for(int run = 0; run < total_runs; run++){
-      for(int i = 0; i < batch_count; i++){
-
-        int start = i * batch_size;
-        int end = (i + 1) * batch_size;
-        
-        void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32); 
-
-        profiler.resume_profiler();
-     
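-        // Assumed runtime signatures (not verified against tensor_runtime.h):
-        // tensorConvolution(input, filter, pad_h, pad_w, stride_h, stride_w, conv_mode, conv_groups)
-        // tensorPooling(input, pool_type, win_h, win_w, pad_v, pad_h, stride_v, stride_h)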
-        void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-        void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-        void* var_2 = tensorRelu(var_1); 
-        void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-        void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-        void* var_6 = tensorRelu(var_5); 
-        void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-        void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-        void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-        void* var_10 = tensorRelu(var_9); 
-        void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-        void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-        void* var_14 = tensorRelu(var_13); 
-        void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-        void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-        void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-        void* var_18 = tensorRelu(var_17); 
-        void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-        void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-        void* var_22 = tensorRelu(var_21); 
-        void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-        void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-        void* var_26 = tensorRelu(var_25); 
-        void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-        void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-        void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-        void* var_30 = tensorRelu(var_29); 
-        void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-        void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-        void* var_34 = tensorRelu(var_33); 
-        void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-        void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-        void* var_38 = tensorRelu(var_37); 
-        void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-        void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-        void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-        void* var_42 = tensorRelu(var_41); 
-        void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-        void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-        void* var_46 = tensorRelu(var_45); 
-        void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-        void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-        void* var_50 = tensorRelu(var_49); 
-        void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-        void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-        void* var_55 = tensorAdd(var_54, dense_1_b); 
-        void* var_56 = tensorRelu(var_55); 
-        void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-        void* var_59 = tensorAdd(var_58, dense_2_b); 
-        void* var_60 = tensorSoftmax(var_59); 
-
-        profiler.pause_profiler();
-        auto time_energy = profiler.get_time_energy();
-        total_time += time_energy.first;
-        total_energy += time_energy.second;
-        profiler.reset();
-
-        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-        float accuracy = computeAccuracy2(labels, batch_size, var_60); 
-        final_accuracy += accuracy;
-        
-        freeBatchMemory();
-    }
-  }
-  profiler.stop_profiler();
-
-  std::cout<<"---------------------------------------\n";
-  std::cout<<"Average time: " << total_time / total_runs << '\n';
-  std::cout<<"Average energy: " << total_energy / total_runs << '\n';
-  std::cout<<"---------------------------------------\n";
-
-  final_accuracy = final_accuracy / batch_count / total_runs;
-  dumpFinalAccuracy(final_accuracy);
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_cifar10_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_cifar10_promise.cc
deleted file mode 100644
index fbc9d038505313adefdf9100a1e55e3a98d823f8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_cifar10_promise.cc
+++ /dev/null
@@ -1,163 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-/* NOTE: Reference Architecture to use for profiling */
-void testAlexNet2(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 100000;
-  }
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/alexnet2_cifar10/test_labels.bin", test_batch_size);
-
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/alexnet2_cifar10/norm_cifar_input.bin",
-			  	   float_type,
-				   test_batch_size, 3, 32, 32);
-    
-    void* conv1_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv1.bin",
-					    float_type, 32, 3, 3, 3);  
-    void* conv1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv1_bias.bin",
-					  float_type, 1, 32, 1, 1);  
-    void* conv2_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv2.bin",
-					    float_type, 32, 32, 3, 3);  
-    void* conv2_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv2_bias.bin",
-					  float_type, 1, 32, 1, 1);
-    void* conv3_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv3.bin",
-					    float_type, 64, 32, 3, 3);  
-    void* conv3_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv3_bias.bin",
-					  float_type, 1, 64, 1, 1);  
-    void* conv4_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv4.bin",
-					    float_type, 64, 64, 3, 3);  
-    void* conv4_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv4_bias.bin",
-					  float_type, 1, 64, 1, 1);
-    void* conv5_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv5.bin",
-					    float_type, 128, 64, 3, 3);  
-    void* conv5_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv5_bias.bin",
-					  float_type, 1, 128, 1, 1);
-    void* conv6_filter = readTrainedWeights("../model_params/alexnet2_cifar10/conv6.bin",
-					    float_type, 128, 128, 3, 3);  
-    void* conv6_bias = readTrainedWeights("../model_params/alexnet2_cifar10/conv6_bias.bin",
-					  float_type, 1, 128, 1, 1);
-  
-    void* fc1_weights = readTrainedWeights("../model_params/alexnet2_cifar10/fc1.bin",
-					   float_type, 1, 1, 2048, 10);  
-    void* fc1_bias = readTrainedWeights("../model_params/alexnet2_cifar10/fc1_bias.bin",
-					float_type, 1, 10, 1, 1);  
- 
-  
-    clearTensorMap();  
-
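-    // Under OpenTuner control, block on the named pipe until the tuner
-    // signals the next configuration; a "stop_run" message aborts the run.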
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0';  // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-
-    // Calibration scratch notes: input range [-1.881, 2.09]; [-0.18, 0.174] (unlabeled)
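-    // Assumed ConvLayer_PROMISE argument layout: input (+min,max), filter
-    // (+min,max), bias (+min,max), pad_h, pad_w, stride_h, stride_w, pool_id,
-    // pool_size, activation_id, output min,max, and a final swing level
-    // (9 appears to select the exact FP32 configuration).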
-    void* conv1_out = ConvLayer_PROMISE(input, -1.881, 2.09, conv1_filter, -0.542,0.371, conv1_bias, -0.066,0.04,
-					1, 1, 1, 1, 0, 0, 0, -1,1, 9);
-
-    void* conv2_out = ConvLayer_PROMISE(conv1_out, -1,1, conv2_filter, -0.424,0.314, conv2_bias, -0.355,-0.172, 
-					1, 1, 1, 1, 0, 2, 0, -1,1, 9);
-    
-    void* conv3_out = ConvLayer_PROMISE(conv2_out, -1,1, conv3_filter, -0.441,0.795, conv3_bias, -0.804,0.753, 
-   				       1, 1, 1, 1, 0, 0, 0, -1,1, 9);
-
-    void* conv4_out = ConvLayer_PROMISE(conv3_out, -1,1, conv4_filter, -0.288,0.31, conv4_bias, -0.635,0.29, 
-				        1, 1, 1, 1, 0, 2, 0, -1,1, 9);
-
-    void* conv5_out = ConvLayer_PROMISE(conv4_out, -1,1, conv5_filter, -0.279,0.376, conv5_bias, -1.13, 1.239,
-					1, 1, 1, 1, 0, 0, 0, -1,1, 9);
-
-    void* conv6_out = ConvLayer_PROMISE(conv5_out, -1,1, conv6_filter, -0.27,0.279, conv6_bias, -0.503,0.127,
-					1, 1, 1, 1, 0, 2, 0, -1,1, 9);
-
-    // No Activation
-    void* fc1_out = FCLayer_PROMISE(conv6_out, -1,1, fc1_weights, -0.242,0.584, fc1_bias, -0.537,0.558, -1, -1,1, 9);
-    
-    void* result = tensorSoftmax(fc1_out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testAlexNet2();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise.cc
deleted file mode 100644
index 0b6e819b8ccd5f6eae7b0ac4c4a593e3c1998c0d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise.cc
+++ /dev/null
@@ -1,132 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000; 
-  int batch_size = 1000;
-  int offset = 5000;
-
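-  // Optional positional args: total_runs bench_acc to_skip test_input_size offset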
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-  
-  llvm_hpvm_initTensorRt(1); 
-
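-  // Run up to total_runs times, aborting early once `to_skip` runs have
-  // missed the bench_acc accuracy target.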
-  int missed = 0;
-  for (int run = 0; run < total_runs; run++){ 
-
-    if (missed >= to_skip){
-      break;           
-    }
-
-    startMemTracking(); 
-
-    
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/alexnet2_cifar10_test/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-      std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-      void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
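-      // Weights are reloaded every batch iteration: freeBatchMemory() at the
-      // end of each iteration appears to free all tensors allocated since
-      // startMemTracking(), including these weight tensors.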
-
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816435, 2.0934134, conv2d_1_w, -0.5421946, 0.3710851, conv2d_1_b, -0.06697306, 0.040868897, 1, 1, 1, 1, -1, 0, 0, -0.9998477, 0.99987465, 9); 
-      // Input range manually narrowed from the calibrated [-0.9998477, 0.99987465] to [-0.8, 0.8]:
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.8, 0.8, conv2d_2_w, -0.42474225, 0.31460348, conv2d_2_b, -0.3557253, -0.17281663, 1, 1, 1, 1, 0, 2, 0, -0.99997115, 1.0, 9);
-      
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.99997115, 1.0, conv2d_3_w, -0.44134507, 0.79587924, conv2d_3_b, -0.80424446, 0.75330096, 1, 1, 1, 1, -1, 0, 0, -0.9999999, 1.0, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9999999, 1.0, conv2d_4_w, -0.2883836, 0.31025785, conv2d_4_b, -0.6353164, 0.29015934, 1, 1, 1, 1, 0, 2, 0, -0.9999999, 0.99999934, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9999999, 0.99999934, conv2d_5_w, -0.2792431, 0.37689754, conv2d_5_b, -1.1379756, 1.2391574, 1, 1, 1, 1, -1, 0, 0, -1.0, 1.0, 9); 
-      void* var_5 = ConvLayer_PROMISE(var_4, -1.0, 1.0, conv2d_6_w, -0.27078503, 0.27942517, conv2d_6_b, -0.503003, 0.12762362, 1, 1, 1, 1, 0, 2, 0, -0.9999941, 0.9999964, 9); 
-      void* var_6 = FCLayer_PROMISE(var_5, -0.9999941, 0.9999964, dense_1_w, -0.24273404, 0.5845544, dense_1_b, -0.53745, 0.558251, -1, -140.6419, 16.402884, 9); 
-      void* var_7 = tensorSoftmax(var_6); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_7); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy);
-
-    if (final_accuracy < bench_acc)
-      missed += 1;
-  }
-
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise_quant.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise_quant.cc
deleted file mode 100644
index 3c3bc018518cf6ab3ff7dd7a608900308efa1e49..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_promise_quant.cc
+++ /dev/null
@@ -1,93 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int total_runs = 1; 
-  for (int run = 0; run < total_runs; run++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 5000; 
-    int batch_size = 5000;
-    int offset = 5000;
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){
-      
-      std::string dir_prefix = std::string("../model_params/alexnet2_cifar10_test/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-      std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-      void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
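-      // Output (min,max) ranges below are calibrated per-layer values rather
-      // than the hand-rounded clips used in the non-quantized variant.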
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816435, 2.0934134, conv2d_1_w, -0.5421946, 0.3710851, conv2d_1_b, -0.06697306, 0.040868897, 1, 1, 1, 1, -1, 0, 0, -0.7750273948907852, 0.7799443006515503, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.7750273948907852, 0.7799443006515503, conv2d_2_w, -0.42474225, 0.31460348, conv2d_2_b, -0.3557253, -0.17281663, 1, 1, 1, 1, 0, 2, 0, -0.8086670643091202, 0.98395329773426, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.8086670643091202, 0.98395329773426, conv2d_3_w, -0.44134507, 0.79587924, conv2d_3_b, -0.80424446, 0.75330096, 1, 1, 1, 1, -1, 0, 0, -0.9956784248352051, 0.9985664486885071, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9956784248352051, 0.9985664486885071, conv2d_4_w, -0.2883836, 0.31025785, conv2d_4_b, -0.6353164, 0.29015934, 1, 1, 1, 1, 0, 2, 0, -0.9932191967964172, 0.9923790097236633, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9932191967964172, 0.9923790097236633, conv2d_5_w, -0.2792431, 0.37689754, conv2d_5_b, -1.1379756, 1.2391574, 1, 1, 1, 1, -1, 0, 0, -0.9999013543128967, 0.9999108910560608, 9); 
-      void* var_5 = ConvLayer_PROMISE(var_4, -0.9999013543128967, 0.9999108910560608, conv2d_6_w, -0.27078503, 0.27942517, conv2d_6_b, -0.503003, 0.12762362, 1, 1, 1, 1, 0, 2, 0, -0.991036117374897, 0.9714049702882765, 9); 
-      void* var_6 = FCLayer_PROMISE(var_5, -0.991036117374897, 0.9714049702882765, dense_1_w, -0.24273404, 0.5845544, dense_1_b, -0.53745, 0.558251, -1, -119.27973731994629, -25.226281957626327, 9); 
-      void* var_7 = tensorSoftmax(var_6); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_7); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_valid.cc
deleted file mode 100644
index 1799480796fb988d0e9624fb482339d2345d2728..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet2_valid.cc
+++ /dev/null
@@ -1,92 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  int total_runs = 1; 
-  for (int run = 0; run < total_runs; run++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 5000; 
-    int batch_size = 5000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){
-      
-      std::string dir_prefix = std::string("../model_params/alexnet2_cifar10_test/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-      std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-      void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816435, 2.0934134, conv2d_1_w, -0.5421946, 0.3710851, conv2d_1_b, -0.06697306, 0.040868897, 1, 1, 1, 1, -1, 0, 0, -0.7750273948907852, 0.7799443006515503, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.7750273948907852, 0.7799443006515503, conv2d_2_w, -0.42474225, 0.31460348, conv2d_2_b, -0.3557253, -0.17281663, 1, 1, 1, 1, 0, 2, 0, -0.8086670643091202, 0.98395329773426, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.8086670643091202, 0.98395329773426, conv2d_3_w, -0.44134507, 0.79587924, conv2d_3_b, -0.80424446, 0.75330096, 1, 1, 1, 1, -1, 0, 0, -0.9956784248352051, 0.9985664486885071, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9956784248352051, 0.9985664486885071, conv2d_4_w, -0.2883836, 0.31025785, conv2d_4_b, -0.6353164, 0.29015934, 1, 1, 1, 1, 0, 2, 0, -0.9932191967964172, 0.9923790097236633, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9932191967964172, 0.9923790097236633, conv2d_5_w, -0.2792431, 0.37689754, conv2d_5_b, -1.1379756, 1.2391574, 1, 1, 1, 1, -1, 0, 0, -0.9999013543128967, 0.9999108910560608, 9); 
-      void* var_5 = ConvLayer_PROMISE(var_4, -0.9999013543128967, 0.9999108910560608, conv2d_6_w, -0.27078503, 0.27942517, conv2d_6_b, -0.503003, 0.12762362, 1, 1, 1, 1, 0, 2, 0, -0.991036117374897, 0.9714049702882765, 9); 
-      void* var_6 = FCLayer_PROMISE(var_5, -0.991036117374897, 0.9714049702882765, dense_1_w, -0.24273404, 0.5845544, dense_1_b, -0.53745, 0.558251, -1, -119.27973731994629, -25.226281957626327, 9); 
-      void* var_7 = tensorSoftmax(var_6); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_7); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_cifar10_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_cifar10_promise.cc
deleted file mode 100644
index 3e39f5cf03ce25511429d84ada9812fef0998194..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_cifar10_promise.cc
+++ /dev/null
@@ -1,158 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-/* NOTE: Reference Architecture to use for profiling */
-void testAlexNet(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 100000;
-  }
-  
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Extend this to batch of images - currently 5 images
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/alexnet_cifar10/test_labels.bin", test_batch_size);
- 
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/alexnet_cifar10/norm_cifar_input.bin",
-				     float_type,
-				     test_batch_size, 3, 32, 32);
-
-    void* conv1_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv1.bin",
-					    float_type, 64, 3, 11, 11);  
-    void* conv1_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv1_bias.bin",
-					  float_type, 1, 64, 1, 1);  
-    void* conv2_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv2.bin",
-					    float_type, 192, 64, 5, 5);  
-    void* conv2_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv2_bias.bin",
-					  float_type, 1, 192, 1, 1);
-
-    void* conv3_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv3.bin",
-					    float_type, 384, 192, 3, 3);  
-    void* conv3_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv3_bias.bin",
-					  float_type, 1, 384, 1, 1);  
-    void* conv4_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv4.bin",
-					    float_type, 256, 384, 3, 3);  
-    void* conv4_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv4_bias.bin",
-					  float_type, 1, 256, 1, 1);
-    void* conv5_filter = readTrainedWeights("../model_params/alexnet_cifar10/conv5.bin",
-					    float_type, 256, 256, 3, 3);  
-    void* conv5_bias = readTrainedWeights("../model_params/alexnet_cifar10/conv5_bias.bin",
-					  float_type, 1, 256, 1, 1);
-  
-    void* fc1_weights = readTrainedWeights("../model_params/alexnet_cifar10/fc1.bin",
-					   float_type, 1, 1, 4096, 10);  
-    void* fc1_bias = readTrainedWeights("../model_params/alexnet_cifar10/fc1_bias.bin",
-					float_type, 1, 10, 1, 1);  
-    
-  
-    clearTensorMap();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      str[bytes_read > 0 ? bytes_read : 0] = '\0';  // read() does not null-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    // Start power and performance profiling 
-    startProfiling();
-
-    // Calibration scratch notes: input range [-1.881, 2.09]; [-0.18, 0.174] (unlabeled)
-    void* conv1_out = ConvLayer_PROMISE(input, -1.881, 2.09, conv1_filter, -0.345,0.331, conv1_bias, -0.76,0.59,
-					5, 5, 1, 1, 0, 2, 0, -1,1, 9);
-
-    void* conv2_out = ConvLayer_PROMISE(conv1_out, -1,1, conv2_filter, -0.22,0.264, conv2_bias, -0.448,0.343, 
-					2, 2, 1, 1, 0, 2, 0, -1,1, 9);
-    
-    void* conv3_out = ConvLayer_PROMISE(conv2_out, -1,1, conv3_filter, -0.192,0.187, conv3_bias, -0.91,0.67, 
-   				       1, 1, 1, 1, 0, 0, 0, -1,1, 9);
-
-    void* conv4_out = ConvLayer_PROMISE(conv3_out, -1,1, conv4_filter, -0.131,0.141, conv4_bias, -0.416,0.342, 
-				        1, 1, 1, 1, 0, 0, 0, -1,1, 9);
-
-    void* conv5_out = ConvLayer_PROMISE(conv4_out, -1,1, conv5_filter, -0.165,0.188, conv5_bias, -0.283,0.062,
-					1, 1, 1, 1, 0, 2, 0, -1,1, 9);
-
-    // No Activation
-    void* fc1_out = FCLayer_PROMISE(conv5_out, -1,1, fc1_weights, -0.181,0.233, fc1_bias, -0.063,0.137, -1, -1,1, 9);
-    
-    void* result = tensorSoftmax(fc1_out);
-
-    // End profiling and dump output to profile.txt
-    stopProfiling();
-  
-    computeAccuracy2(labels, test_batch_size, result);
-    
-    dumpAccuracyNorms();
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-
-
-  
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testAlexNet();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise.cc
deleted file mode 100644
index 2529e8eabfb061ae26798612ce9f30fb57e096f5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise.cc
+++ /dev/null
@@ -1,130 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000; 
-  int batch_size = 1000;
-  int offset = 5000;
-
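-  // argv (all optional): total_runs bench_acc to_skip test_input_size offset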
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-  
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-  
-  llvm_hpvm_initTensorRt(0); 
-
-
-  
-  int missed = 0; 
-  for (int run = 0; run < total_runs; run++){ 
-    
-    if (missed >= to_skip){
-     break;           
-    }
-
-    startMemTracking(); 
-
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){
-      
-      std::string dir_prefix = std::string("../model_params/alexnet_cifar10_test/");   
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-      
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816426241908337, 2.0934095498544254, conv2d_1_w, -0.33087718, 0.3323643, conv2d_1_b, -0.7782218, 0.6020472, 5, 5, 1, 1, 0, 2, 0, -0.978641152381897, 0.9989452958106995, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.978641152381897, 0.9989452958106995, conv2d_2_w, -0.2095158, 0.33543423, conv2d_2_b, -0.45020863, 0.30596754, 2, 2, 1, 1, 0, 2, 0, -0.9997039437294006, 0.999930202960968, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.9997039437294006, 0.999930202960968, conv2d_3_w, -0.1715614, 0.17037082, conv2d_3_b, -0.6519161, 0.5939945, 1, 1, 1, 1, -1, 0, 0, -0.9999336004257202, 0.999940037727356, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9999336004257202, 0.999940037727356, conv2d_4_w, -0.15575546, 0.14456555, conv2d_4_b, -0.55873865, 0.4704539, 1, 1, 1, 1, -1, 0, 0, -0.9999991059303284, 0.9999993443489075, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9999991059303284, 0.9999993443489075, conv2d_5_w, -0.16108225, 0.16864482, conv2d_5_b, -0.22135437, 0.10401678, 1, 1, 1, 1, 0, 2, 0, -0.9994344115257263, 0.9996342062950134, 9); 
-      void* var_5 = FCLayer_PROMISE(var_4, -0.9994344115257263, 0.9996342062950134, dense_1_w, -0.18183032, 0.19018902, dense_1_b, -0.07189204, 0.106005594, -1, -15.076565380096437, 19.422585220336913, 9); 
-      void* var_6 = tensorSoftmax(var_5); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_6); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy);
-
-    
-    if (final_accuracy < bench_acc)
-      missed += 1;
-  }
-
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise_quant.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise_quant.cc
deleted file mode 100644
index 6b2b0e80ba92fa449cdd06036946101df76317e7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_promise_quant.cc
+++ /dev/null
@@ -1,90 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int total_runs = 1; 
-  for (int run = 0; run < total_runs; run++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 5000; 
-    int batch_size = 5000;
-    int offset = 5000;
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/alexnet_cifar10_test/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816426241908337, 2.0934095498544254, conv2d_1_w, -0.33087718, 0.3323643, conv2d_1_b, -0.7782218, 0.6020472, 5, 5, 1, 1, 0, 2, 0, -0.978641152381897, 0.9989452958106995, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.978641152381897, 0.9989452958106995, conv2d_2_w, -0.2095158, 0.33543423, conv2d_2_b, -0.45020863, 0.30596754, 2, 2, 1, 1, 0, 2, 0, -0.9997039437294006, 0.999930202960968, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.9997039437294006, 0.999930202960968, conv2d_3_w, -0.1715614, 0.17037082, conv2d_3_b, -0.6519161, 0.5939945, 1, 1, 1, 1, -1, 0, 0, -0.9999336004257202, 0.999940037727356, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9999336004257202, 0.999940037727356, conv2d_4_w, -0.15575546, 0.14456555, conv2d_4_b, -0.55873865, 0.4704539, 1, 1, 1, 1, -1, 0, 0, -0.9999991059303284, 0.9999993443489075, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9999991059303284, 0.9999993443489075, conv2d_5_w, -0.16108225, 0.16864482, conv2d_5_b, -0.22135437, 0.10401678, 1, 1, 1, 1, 0, 2, 0, -0.9994344115257263, 0.9996342062950134, 9); 
-      void* var_5 = FCLayer_PROMISE(var_4, -0.9994344115257263, 0.9996342062950134, dense_1_w, -0.18183032, 0.19018902, dense_1_b, -0.07189204, 0.106005594, -1, -15.076565380096437, 19.422585220336913, 9); 
-      void* var_6 = tensorSoftmax(var_5); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_6); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-  }
-
-  //dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_valid.cc
deleted file mode 100644
index 7a0a40adb30367866635993de3de94ca1413938e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/alexnet_valid.cc
+++ /dev/null
@@ -1,90 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  int total_runs = 20; 
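-  // 20 repeated validation passes over the first 5000 images (offset 0);
-  // per-run accuracies are collected via dumpExecutionAccuracies().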
-  for (int run = 0; run < total_runs; run++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 5000; 
-    int batch_size = 5000;
-    int offset = 0;
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/alexnet_cifar10_test/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
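-      // ConvLayer_PROMISE fuses conv + bias-add + optional pooling/activation.
-      // Argument order, as used throughout these generated sources: input and
-      // its min/max range, filter and range, bias and range, padding (h, w),
-      // strides (h, w), pool type (-1 = none), pool size, activation id
-      // (-1 none, 0 tanh, 1 relu, 2 clipped relu, judging by the calls in
-      // these files), output min/max, and the trailing PROMISE swing level
-      // (the approximation knob). FCLayer_PROMISE follows the same pattern
-      // without the conv/pool fields.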
-      void* var_0 = ConvLayer_PROMISE(input, -1.8816426241908337, 2.0934095498544254, conv2d_1_w, -0.33087718, 0.3323643, conv2d_1_b, -0.7782218, 0.6020472, 5, 5, 1, 1, 0, 2, 0, -0.978641152381897, 0.9989452958106995, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, -0.978641152381897, 0.9989452958106995, conv2d_2_w, -0.2095158, 0.33543423, conv2d_2_b, -0.45020863, 0.30596754, 2, 2, 1, 1, 0, 2, 0, -0.9997039437294006, 0.999930202960968, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, -0.9997039437294006, 0.999930202960968, conv2d_3_w, -0.1715614, 0.17037082, conv2d_3_b, -0.6519161, 0.5939945, 1, 1, 1, 1, -1, 0, 0, -0.9999336004257202, 0.999940037727356, 9); 
-      void* var_3 = ConvLayer_PROMISE(var_2, -0.9999336004257202, 0.999940037727356, conv2d_4_w, -0.15575546, 0.14456555, conv2d_4_b, -0.55873865, 0.4704539, 1, 1, 1, 1, -1, 0, 0, -0.9999991059303284, 0.9999993443489075, 9); 
-      void* var_4 = ConvLayer_PROMISE(var_3, -0.9999991059303284, 0.9999993443489075, conv2d_5_w, -0.16108225, 0.16864482, conv2d_5_b, -0.22135437, 0.10401678, 1, 1, 1, 1, 0, 2, 0, -0.9994344115257263, 0.9996342062950134, 9); 
-      void* var_5 = FCLayer_PROMISE(var_4, -0.9994344115257263, 0.9996342062950134, dense_1_w, -0.18183032, 0.19018902, dense_1_b, -0.07189204, 0.106005594, -1, -15.076565380096437, 19.422585220336913, 9); 
-      void* var_6 = tensorSoftmax(var_5); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_6); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_batchnorm_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_batchnorm_promise.cc
deleted file mode 100644
index 9ca67a6d6d6b11281ab36531d58e56c3e5fa4347..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_batchnorm_promise.cc
+++ /dev/null
@@ -1,101 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  int total_runs = 1; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 10000; 
-    int batch_size = 10000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/depthwise_batchnorm2/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string depthwise_conv2d_1_b_path =  dir_prefix + std::string("depthwise_conv2d_1_b.bin"); 
-      void* depthwise_conv2d_1_b =  readTrainedWeights(depthwise_conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,6272,1024); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-      std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-      void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-      std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-      void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-      void* var_0 = ConvLayer_PROMISE(input, 0.0, 1.0, conv2d_1_w, -0.21894497, 0.20240873, conv2d_1_b, -0.2508162, 0.031047817, 2, 2, 1, 1, -1, 0, 1, 0.0, 0.3616602423787114, 9); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorPooling(var_1,0,2,2,0,0,2,2); 
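-      // Depthwise convolution expressed as a grouped convolution: the trailing
-      // argument (32) is the group count, matching the 32 input channels; the
-      // bias-add, ReLU, and batch-norm that ConvLayer_PROMISE would fuse are
-      // applied as separate tensor ops below.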
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorAdd(var_3, depthwise_conv2d_1_b); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = tensorBatchNorm(var_5, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_7 = FCLayer_PROMISE(var_6, -1.0247770547866821, 5.120966439247134, dense_1_w, -0.04442959, 0.04483322, dense_1_b, -0.001500695, 0.002055318, 1, 0.0, 4.769639563560498, 9); 
-      void* var_8 = FCLayer_PROMISE(var_7, 0.0, 4.769639563560498, dense_2_w, -0.123055816, 0.12699054, dense_2_b, -0.00430397, 0.004860983, 1, 0.0, 21.660391826629642, 9); 
-      void* var_9 = tensorSoftmax(var_8); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_9); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_promise.cc
deleted file mode 100644
index 176f1253f7d0120cc4c5600234e543edd1ddf4d1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/depthwise_promise.cc
+++ /dev/null
@@ -1,83 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
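-  // The argument to llvm_hpvm_initTensorRt appears to select the GPU device
-  // to run on (device 1 here; most other benchmarks in this directory pass 0).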
-  llvm_hpvm_initTensorRt(1); 
-
-  int total_runs = 1; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 10000; 
-    int batch_size = 10000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/depthwise_test_8/");
-      
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string depthwise_conv2d_1_b_path =  dir_prefix + std::string("depthwise_conv2d_1_b.bin"); 
-      void* depthwise_conv2d_1_b =  readTrainedWeights(depthwise_conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,6272,1024); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-      std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-      void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-      std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-      void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-      void* var_0 = ConvLayer_PROMISE(input, 0.0, 1.0, conv2d_1_w, -0.45243406, 0.4331673, conv2d_1_b, -0.2225991, 0.05682303, 2, 2, 1, 1, 0, 2, 1, 0.0, 2.593297730684286, 9); 
-      void* var_1 = tensorConvolution(var_0, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_2 = tensorAdd(var_1, depthwise_conv2d_1_b); 
-      void* var_3 = tensorRelu(var_2); 
-      void* var_4 = FCLayer_PROMISE(var_3, 0.0, 1.4831079334020663, dense_1_w, -0.1172131, 0.105426796, dense_1_b, -0.027105594, 0.04015947, 1, 0.0, 2.723612790107728, 9); 
-      void* var_5 = FCLayer_PROMISE(var_4, 0.0, 2.723612790107728, dense_2_w, -0.23769215, 0.20409682, dense_2_b, -0.004073992, 0.049675815, 1, 0.0, 24.92628944396973, 9); 
-      void* var_6 = tensorSoftmax(var_5); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_6); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc2_clipped_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc2_clipped_promise.cc
deleted file mode 100644
index 9bf088b1d5497d3f96d117d9c3d2cc79479d2f1e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc2_clipped_promise.cc
+++ /dev/null
@@ -1,122 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_2_Layer_clipped_FC(){
-
-  int total_runs = 100000;
-  if(Opentuner_run){
-    total_runs = 100000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-
-  clearTensorMap();
-
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				     float_type, test_batch_size, 1, 28, 28);  
-    void* fc1_weights = readTrainedWeights("../model_params/fc2_clipped/fc1.bin",
-					   float_type, 1, 1, 784, 128);  
-    void* fc1_bias = readTrainedWeights("../model_params/fc2_clipped/fc1_bias.bin",
-					float_type, 1, 128, 1, 1);  
-    void* fc2_weights = readTrainedWeights("../model_params/fc2_clipped/fc2.bin",
-					   float_type, 1, 1, 128, 10);  
-    void* fc2_bias = readTrainedWeights("../model_params/fc2_clipped/fc2_bias.bin",
-					float_type, 1, 10, 1, 1);
-    
-
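-    // OpenTuner handshake over a named pipe: block on /tmp/myfifo until the
-    // tuner signals the next configuration (or "stop_run" to terminate), and
-    // report completion back on the same pipe after each iteration.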
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not NUL-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-      close(fd);
-    }
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    
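-    // PROMISE FC layers: the per-tensor ranges are quantization intervals
-    // (weights/biases in [-1,1], activations clipped to [0,2]); activation
-    // id 2 matches the "clipped" banner above, and the final 8 is the swing
-    // level.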
-    void* fc1_out = FCLayer_PROMISE(input, 0,1, fc1_weights, -1,1, fc1_bias, -1,1,
-				    2, 0,2, 8);
-    
-    void* fc2_out = FCLayer_PROMISE(fc1_out, 0,2, fc2_weights, -1,1, fc2_bias, -1,1,
-				    2, 0,2, 8);
-     
-    void* result = tensorSoftmax(fc2_out);
-
-      
-    computeAccuracy2(labels, test_batch_size, result);
-
-    freeOutputTensors();  
-
-    
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-
-  printf("**** ALL RUNS COMPLETTED **** \n\n");
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // Initializes the tensor runtime - must be called before any other runtime call
-  llvm_hpvm_initTensorRt(0);
-
-  test_2_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc3_clipped_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc3_clipped_promise.cc
deleted file mode 100644
index 43eca42ee884e7f65a3ed5958c733cec64b820ed..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc3_clipped_promise.cc
+++ /dev/null
@@ -1,129 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_3_Layer_clipped_FC(){
-
-  int total_runs = 100000;
-  if(Opentuner_run){
-    total_runs = 100000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-
-  clearTensorMap();
-
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				     float_type, test_batch_size, 1, 28, 28);  
-    void* fc1_weights = readTrainedWeights("../model_params/fc3_clipped/fc1.bin",
-					   float_type, 1, 1, 784, 256);  
-    void* fc1_bias = readTrainedWeights("../model_params/fc3_clipped/fc1_bias.bin",
-					float_type, 1, 256, 1, 1);  
-    void* fc2_weights = readTrainedWeights("../model_params/fc3_clipped/fc2.bin",
-					   float_type, 1, 1, 256, 128);  
-    void* fc2_bias = readTrainedWeights("../model_params/fc3_clipped/fc2_bias.bin",
-					float_type, 1, 128, 1, 1);  
-    void* fc3_weights = readTrainedWeights("../model_params/fc3_clipped/fc3.bin",
-					   float_type, 1, 1, 128, 10);  
-    void* fc3_bias = readTrainedWeights("../model_params/fc3_clipped/fc3_bias.bin",
-					float_type, 1, 10, 1, 1);  
-
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not NUL-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-      close(fd);
-    }
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    
-    void* fc1_out = FCLayer_PROMISE(input, 0,1, fc1_weights, -1,1, fc1_bias, -1,1,
-				    2, 0,2, 8);
-    
-    void* fc2_out = FCLayer_PROMISE(fc1_out, 0,2, fc2_weights, -1,1, fc2_bias, -1,1,
-				    2, 0,2, 8);
-
-    void* fc3_out = FCLayer_PROMISE(fc2_out, 0,2, fc3_weights, -1,1, fc3_bias, -1,1,
-				    2, 0,2, 8);
-     
-    void* result = tensorSoftmax(fc3_out);
-
-      
-    computeAccuracy2(labels, test_batch_size, result);
-
-    freeOutputTensors();  
-
-    
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-    
-  }
-
-  printf("**** ALL RUNS COMPLETTED **** \n\n");
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // Initializes the tensor runtime - must be called before any other runtime call
-  llvm_hpvm_initTensorRt(0);
-
-  test_3_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc4_clipped_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc4_clipped_promise.cc
deleted file mode 100644
index e31002debd9793557e195f598a14a840b7014c28..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/fc4_clipped_promise.cc
+++ /dev/null
@@ -1,135 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-bool Opentuner_run = false;
-
-
-void test_4_Layer_clipped_FC(){
-
-  int total_runs = 1000;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-  
-  printf("********* 3-Layer FC with clipped activations and weights ********* \n");
-
-  int test_batch_size = 5000;
-
-  uint8_t* labels = readLabels("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size);
-
-  
-  clearTensorMap();
-
-  for(int i = 0; i < total_runs; i++){
-
-    void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				     float_type, test_batch_size, 1, 28, 28);  
-    void* fc1_weights = readTrainedWeights("../model_params/fc4_clipped/fc1.bin",
-					   float_type, 1, 1, 784, 512);  
-    void* fc1_bias = readTrainedWeights("../model_params/fc4_clipped/fc1_bias.bin",
-					float_type, 1, 512, 1, 1);  
-    void* fc2_weights = readTrainedWeights("../model_params/fc4_clipped/fc2.bin",
-					   float_type, 1, 1, 512, 256);  
-    void* fc2_bias = readTrainedWeights("../model_params/fc4_clipped/fc2_bias.bin",
-					float_type, 1, 256, 1, 1);  
-    void* fc3_weights = readTrainedWeights("../model_params/fc4_clipped/fc3.bin",
-					   float_type, 1, 1, 256, 128);  
-    void* fc3_bias = readTrainedWeights("../model_params/fc4_clipped/fc3_bias.bin",
-					float_type, 1, 128, 1, 1);
-    void* fc4_weights = readTrainedWeights("../model_params/fc4_clipped/fc4.bin",
-					   float_type, 1, 1, 128, 10);  
-    void* fc4_bias = readTrainedWeights("../model_params/fc4_clipped/fc4_bias.bin",
-					float_type, 1, 10, 1, 1);  
-
-  
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-
-      char str[100];
-      ssize_t n = read(fd, str, sizeof(str) - 1);
-      str[n > 0 ? n : 0] = '\0'; // read() does not NUL-terminate
-      if(strcmp(str, "stop_run") == 0){
-	abort();
-      }
-
-      close(fd);
-    }
-
-    
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-    
-    void* fc1_out = FCLayer_PROMISE(input, 0,1, fc1_weights, -1,1, fc1_bias, -1,1,
-				    2, 0,2, 9);
-    
-    void* fc2_out = FCLayer_PROMISE(fc1_out, 0,2, fc2_weights, -1,1, fc2_bias, -1,1,
-				    2, 0,2, 9);
-
-    void* fc3_out = FCLayer_PROMISE(fc2_out, 0,2, fc3_weights, -1,1, fc3_bias, -1,1,
-				    2, 0,2, 9);
-    
-    void* fc4_out = FCLayer_PROMISE(fc3_out, 0,2, fc4_weights, -1,1, fc4_bias, -1,1,
-				    2, 0,2, 9);
- 
-    void* result = tensorSoftmax(fc4_out);
-  
-    computeAccuracy2(labels, test_batch_size, result);
-
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-	printf("Invalid descriptor \n");
-	abort();
-      }
-      
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1);
-      close(fd_out);
-    }
-  }
-
-  
-}
-
-
-
-int main(int argc, char* argv[]){
- 
-  if(argc > 1)
-    Opentuner_run = true;
-  
-  // Initializes the tensor runtime - must be called before any other runtime call
-  llvm_hpvm_initTensorRt(0);
-
-  test_4_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise.cc
deleted file mode 100644
index 91026260a94c851234a865e83eda82f24eb2918c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise.cc
+++ /dev/null
@@ -1,137 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-int test_input_size = 3000; 
-int batch_size = 3000;
-int offset = 5000;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetTanh(){
-
-  printf("********* Lenet-5 Architecture ********** \n");
-  
-  std::string dir_prefix = std::string("../model_params/lenet_params/");   
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  
-  clearTensorMap();
-
-  int missed = 0;
-  for(int i = 0; i < total_runs; i++){
-
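-    // Early exit: stop the experiment once `to_skip` runs have fallen below
-    // the bench_acc accuracy threshold (counted via `missed` below).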
-    if (missed >= to_skip){
-      break;           
-    }   
-
-    int start = offset; 
-    int end = batch_size + offset; 
-
-    // Loading Input Batch
-    void* input = readInputBatch(input_path.c_str(),0,start,end,1,28,28); 
-
-    // Loading Weights
-    void* conv1_filter = readTrainedWeights("../model_params/lenet_keras/conv1.bin",
-					    float_type, 32, 1, 5, 5);    
-    void* conv1_bias = readTrainedWeights("../model_params/lenet_keras/conv1_bias.bin",
-					  float_type, 1, 32, 1, 1);  
-    void* conv2_filter = readTrainedWeights("../model_params/lenet_keras/conv2.bin",
-					    float_type, 64, 32, 5, 5);  
-    void* conv2_bias = readTrainedWeights("../model_params/lenet_keras/conv2_bias.bin",
-					  float_type, 1, 64, 1, 1);  
-    void* fc1_weights = readTrainedWeights("../model_params/lenet_keras/fc1.bin",
-					   float_type, 1, 1, 7*7*64, 1024);  
-    void* fc1_bias = readTrainedWeights("../model_params/lenet_keras/fc1_bias.bin",
-					float_type, 1, 1024, 1, 1);  
-    void* fc2_weights = readTrainedWeights("../model_params/lenet_keras/fc2.bin",
-					   float_type, 1, 1, 1024, 10);  
-    void* fc2_bias = readTrainedWeights("../model_params/lenet_keras/fc2_bias.bin",
-					float_type, 1, 10, 1, 1);  
-
-    
-    // DNN Operations
-    void* conv1_out = ConvLayer_PROMISE(input, 0,1, conv1_filter, -1,1, conv1_bias, -1,1,
-					2, 2, 1, 1, 0, 2, 0, -1,1, 9);
-    void* conv2_out = ConvLayer_PROMISE(conv1_out, -1,1, conv2_filter, -1,1,
-					conv2_bias, -1,1,
-					2, 2, 1, 1, 0, 2, 0, -1,1, 9);
-
-    void* fc1_out = FCLayer_PROMISE(conv2_out, -1,1, fc1_weights, -1,1, fc1_bias, -1,1,
-				    0, -1,1, 9);    
-    void* fc2_out = FCLayer_PROMISE(fc1_out, -1,1, fc2_weights, -1,1, fc2_bias, -1,1,
-				    0, -1,1, 9);
-
-    void* result = tensorSoftmax(fc2_out);
-
-    
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, result);
-
-    
-    freeOutputTensors();  
-
-    dumpFinalAccuracy(accuracy); 
-
-
-    if (accuracy < bench_acc)
-      missed += 1;
-        
-  }
-
-  dumpExecutionAccuracies(); 
-}
-
-
-int main(int argc, char* argv[]){
-
-
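-  // Optional CLI overrides: argv[1] = total_runs, argv[2] = bench_acc,
-  // argv[3] = to_skip, argv[4] = test_input_size (also used as batch_size),
-  // argv[5] = offset into the input set.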
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-    
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);
-    batch_size = atoi(argv[4]);
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-
-  
-  llvm_hpvm_initTensorRt(1);
-
-  testLenetTanh();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise_relu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise_relu.cc
deleted file mode 100644
index 5c7699026fe6e0860718e5986f4fec990ab08c6c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/lenet_promise_relu.cc
+++ /dev/null
@@ -1,65 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/lenet_relu/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  void* input = readTrainedWeights(input_path.c_str(), 0,10000,1,28,28); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
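-  // The per-tensor min/max constants below are quantization ranges,
-  // presumably profiled offline on a calibration set and baked into this
-  // generated source rather than computed at runtime.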
-  void* var_0 = ConvLayer_PROMISE(input, 0.0, 1.0, conv2d_1_w, -0.2722561, 0.25817025,
-				  conv2d_1_b, -0.041063767, 0.031912163,
-				  2, 2, 1, 1, 0, 2, 1, 0.0, 1.5512946, 9); 
-  void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.5512946, conv2d_2_w, -0.17580177, 0.16332611,
-				  conv2d_2_b, -0.041385915, 0.05869476,
-				  2, 2, 1, 1, -1, 0, 1, 0.0, 4.916329, 9); 
-  void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.916329, conv2d_3_w, -0.20324017, 0.18275258,
-				  conv2d_3_b, -0.039915435, 0.04589232,
-				  1, 1, 2, 2, -1, 0, 1, 0.0, 9.447418, 9); 
-  void* var_3 = FCLayer_PROMISE(var_2, 0.0, 9.447418, dense_1_w, -0.10757191, 0.123126,
-				dense_1_b, -0.025070198, 0.027000334, 1, 0.0, 9.926857, 9); 
-  void* var_4 = FCLayer_PROMISE(var_3, 0.0, 9.926857, dense_2_w, -0.18867673, 0.16425411,
-				dense_2_b, -0.012622595, 0.04586973, 1, 0.0, 42.018578, 9); 
-  void* var_5 = tensorSoftmax(var_4); 
-
-  computeAccuracy2(labels,10000,var_5); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_promise.cc
deleted file mode 100644
index cbfc534681efdea9967bd6d0096572e2bad87c16..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_promise.cc
+++ /dev/null
@@ -1,454 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000; 
-  int batch_size = 1000;
-  int offset = 5000;
-  
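-  // Same CLI scheme as the other PROMISE benchmarks: argv[1] = total_runs,
-  // argv[2] = bench_acc, argv[3] = to_skip, argv[4] = test_input_size,
-  // argv[5] = offset.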
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-    
-  llvm_hpvm_initTensorRt(1); 
-
-
-  int missed = 0;
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    if (missed >= to_skip){
-      break;           
-    }
-
-    startMemTracking(); 
-
-    
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-
-      std::string dir_prefix = std::string("../model_params/mobilenet/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-      std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-      void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-      void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-      void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-      void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-      void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-      void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-      void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-      void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-      void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-      std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-      void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-      void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-      void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-      void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-      void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-      void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-      void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-      void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-      void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-      std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-      void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-      void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-      void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-      void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-      void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-      void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-      void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-      void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-      void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-      std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-      void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-      void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-      void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-      void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-      void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-      void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-      void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-      void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-      void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-      std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-      void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-      void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-      void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-      void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-      void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-      void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-      void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-      void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-      void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-      std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-      void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-      void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-      void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-      void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-      void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-      void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-      void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-      void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-      void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-      void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-      void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-      void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-      void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-      void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-      void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-      void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-      void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-      void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-      void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-      void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-      void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-      void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-      void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-      void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-      void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-      void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-      void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-      void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-      void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-      void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-      void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-      void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-      void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-      void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-      void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-      void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-      void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-      void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-      void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-      void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-      void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-      void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-      void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-      void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-      void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-      void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-      void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-      void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-      void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-      void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-      void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-      void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-      void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-      void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-      void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-      void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-      void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-      void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-      void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-      void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-      std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-      void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-      void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-      void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-      void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-      void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-      std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-      void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-      void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-      void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-      void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-      void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-      std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-      void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-      void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-      void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-      void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -2.196306920051575, 1.347581704139706, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -60.89275047302246, 51.99256916046146, 9); 
-      void* var_1 = tensorHalfBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorHalfBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.713541553974245, conv2d_2_w, -0.9317721160650253, 1.0774258937835774, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.518589503288269, 6.810842518806449, 9); 
-      void* var_7 = tensorHalfBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_10 = tensorHalfBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_11 = tensorRelu(var_10); 
-      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.932139402866376, conv2d_3_w, -0.5316544661521911, 0.5753790403604531, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.482631235122681, 3.96730119752885, 9); 
-      void* var_13 = tensorHalfBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_16 = tensorHalfBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.103263397693674, conv2d_4_w, -0.36234098821878435, 0.4076913900375366, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.04261828327179, 3.88677932929993, 9); 
-      void* var_19 = tensorHalfBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_22 = tensorHalfBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.383221302509475, conv2d_5_w, -0.3131200549006462, 0.29357679939270065, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.921469215393066, 4.338679324150087, 9); 
-      void* var_25 = tensorHalfBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_28 = tensorHalfBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_29 = tensorRelu(var_28); 
-      void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 4.316738154411368, conv2d_6_w, -0.23299247801303866, 0.2580290257930756, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.207789947509766, 3.932436970710759, 9); 
-      void* var_31 = tensorHalfBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_32 = tensorRelu(var_31); 
-      void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_34 = tensorHalfBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 5.830408106803901, conv2d_7_w, -0.20233777219057084, 0.18998308175802117, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.298286915779113, 4.848135117530843, 9); 
-      void* var_37 = tensorHalfBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorConvolution(var_38, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-      void* var_40 = tensorHalfBatchNorm(var_39, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-      void* var_41 = tensorRelu(var_40); 
-      void* var_42 = ConvLayer_PROMISE(var_41, 0.0, 4.446417809963227, conv2d_8_w, -0.17442735651135444, 0.17695830866694454, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.347910885810852, 3.6144364695549145, 9); 
-      void* var_43 = tensorHalfBatchNorm(var_42, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-      void* var_44 = tensorRelu(var_43); 
-      void* var_45 = tensorConvolution(var_44, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-      void* var_46 = tensorHalfBatchNorm(var_45, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-      void* var_47 = tensorRelu(var_46); 
-      void* var_48 = ConvLayer_PROMISE(var_47, 0.0, 4.518095604896667, conv2d_9_w, -0.14546796187758446, 0.15256431668996823, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.0287702755928043, 2.9487365779876953, 9); 
-      void* var_49 = tensorHalfBatchNorm(var_48, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-      void* var_50 = tensorRelu(var_49); 
-      void* var_51 = tensorConvolution(var_50, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-      void* var_52 = tensorHalfBatchNorm(var_51, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-      void* var_53 = tensorRelu(var_52); 
-      void* var_54 = ConvLayer_PROMISE(var_53, 0.0, 6.348575634956407, conv2d_10_w, -0.13025874522328376, 0.13558243343234128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.2293100805282595, 3.5315046372413645, 9); 
-      void* var_55 = tensorHalfBatchNorm(var_54, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-      void* var_56 = tensorRelu(var_55); 
-      void* var_57 = tensorConvolution(var_56, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-      void* var_58 = tensorHalfBatchNorm(var_57, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-      void* var_59 = tensorRelu(var_58); 
-      void* var_60 = ConvLayer_PROMISE(var_59, 0.0, 5.221003110408843, conv2d_11_w, -0.11900172759592534, 0.12536374783515936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.038203780174255, 4.004009407043483, 9); 
-      void* var_61 = tensorHalfBatchNorm(var_60, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-      void* var_62 = tensorRelu(var_61); 
-      void* var_63 = tensorConvolution(var_62, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-      void* var_64 = tensorHalfBatchNorm(var_63, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-      void* var_65 = tensorRelu(var_64); 
-      void* var_66 = ConvLayer_PROMISE(var_65, 0.0, 5.732498347759442, conv2d_12_w, -0.10839721685647964, 0.11625668607652187, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3111015114784244, 4.462933233261136, 9); 
-      void* var_67 = tensorHalfBatchNorm(var_66, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-      void* var_68 = tensorRelu(var_67); 
-      void* var_69 = tensorConvolution(var_68, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-      void* var_70 = tensorHalfBatchNorm(var_69, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-      void* var_71 = tensorHalfRelu(var_70); 
-      void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 7.240498211860681, conv2d_13_w, -0.08623744961619377, 0.08859449951350662, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.175431394577027, 6.2043294754027345, 9); 
-      void* var_73 = tensorHalfBatchNorm(var_72, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-      void* var_74 = tensorHalfRelu(var_73); 
-      void* var_75 = tensorConvolution(var_74, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-      void* var_76 = tensorHalfBatchNorm(var_75, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-      void* var_77 = tensorRelu(var_76); 
-      void* var_78 = ConvLayer_PROMISE(var_77, 0.0, 7.813958834648251, conv2d_14_w, -0.06813025139272214, 0.07002027779817581, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.920566423416137, 2.6442912578582534, 9); 
-      void* var_79 = tensorHalfBatchNorm(var_78, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-      void* var_80 = tensorHalfRelu(var_79); 
-      void* var_81 = tensorHalfPooling(var_80,1,2,2,0,0,2,2); 
-      void* var_82 = FCLayer_PROMISE(var_81, 0.0, 2.8692066650391013, dense_1_w, -0.22301019695401192, 0.1442659378200768, dense_1_b, -0.1654396, 0.23336112, -1, -12.245949958801269, 23.80532513427739, 9); 
-      void* var_83 = tensorSoftmax(var_82); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_83); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-    if (final_accuracy < bench_acc)
-     missed += 1;
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
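
[Editor's note on the deleted benchmark above: every one of these generated PROMISE sources follows the same driver pattern — reload the weight tensors each batch iteration, slice the input/label files by batch index, run the layer sequence, and average per-batch accuracy. Below is a minimal sketch of that loop, using only the tensor_runtime calls that appear verbatim in the deleted code (readInputBatch, readLabelsBatch, computeAccuracy2, freeBatchMemory); runNetwork is a hypothetical stand-in for the generated ConvLayer_PROMISE / tensorBatchNorm / tensorRelu sequence and is not part of the original sources.]

    #include <string>
    #include "tensor_runtime.h"   // assumed to declare the read*/compute* calls used below

    // Hypothetical stand-in for the generated layer sequence ending in tensorSoftmax.
    extern void* runNetwork(void* input);

    float evaluate(const std::string& dir, int test_input_size, int batch_size) {
      std::string input_path  = dir + "input.bin";
      std::string labels_path = dir + "labels.bin";
      int batch_count = test_input_size / batch_size;
      float final_accuracy = 0.0f;
      for (int i = 0; i < batch_count; i++) {
        int start = i * batch_size;            // window of rows read from input.bin
        int end   = (i + 1) * batch_size;
        void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
        void* out = runNetwork(input);         // softmax output for this batch
        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end);
        final_accuracy += computeAccuracy2(labels, batch_size, out);
        freeBatchMemory();                     // release per-batch tensors
      }
      return final_accuracy / batch_count;     // mean accuracy over all batches
    }
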
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_quant.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_quant.cc
deleted file mode 100644
index 3cb28def9b48bf29f3cffd5611991b0fbaeb4c55..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_quant.cc
+++ /dev/null
@@ -1,419 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(1); 
-
-int total_runs = 1; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 2000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-std::string dir_prefix = std::string("../../keras/data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -2.196306920051575, 1.347581704139706, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -60.89275047302246, 51.99256916046146, 9); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_5 = tensorRelu(var_4); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.713541553974245, conv2d_2_w, -0.9317721160650253, 1.0774258937835774, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.518589503288269, 6.810842518806449, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.932139402866376, conv2d_3_w, -0.5316544661521911, 0.5753790403604531, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.482631235122681, 3.96730119752885, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.103263397693674, conv2d_4_w, -0.36234098821878435, 0.4076913900375366, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.04261828327179, 3.88677932929993, 9); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.383221302509475, conv2d_5_w, -0.3131200549006462, 0.29357679939270065, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.921469215393066, 4.338679324150087, 9); 
-void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_29 = tensorRelu(var_28); 
-void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 4.316738154411368, conv2d_6_w, -0.23299247801303866, 0.2580290257930756, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.207789947509766, 3.932436970710759, 9); 
-void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_32 = tensorRelu(var_31); 
-void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 5.830408106803901, conv2d_7_w, -0.20233777219057084, 0.18998308175802117, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.298286915779113, 4.848135117530843, 9); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorConvolution(var_38, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-void* var_40 = tensorBatchNorm(var_39, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_41 = tensorRelu(var_40); 
-void* var_42 = ConvLayer_PROMISE(var_41, 0.0, 4.446417809963227, conv2d_8_w, -0.17442735651135444, 0.17695830866694454, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.347910885810852, 3.6144364695549145, 9); 
-void* var_43 = tensorBatchNorm(var_42, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_44 = tensorRelu(var_43); 
-void* var_45 = tensorConvolution(var_44, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-void* var_46 = tensorBatchNorm(var_45, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_47 = tensorRelu(var_46); 
-void* var_48 = ConvLayer_PROMISE(var_47, 0.0, 4.518095604896667, conv2d_9_w, -0.14546796187758446, 0.15256431668996823, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.0287702755928043, 2.9487365779876953, 9); 
-void* var_49 = tensorBatchNorm(var_48, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorConvolution(var_50, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_54 = ConvLayer_PROMISE(var_53, 0.0, 6.348575634956407, conv2d_10_w, -0.13025874522328376, 0.13558243343234128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.2293100805282595, 3.5315046372413645, 9); 
-void* var_55 = tensorBatchNorm(var_54, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_56 = tensorRelu(var_55); 
-void* var_57 = tensorConvolution(var_56, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-void* var_58 = tensorBatchNorm(var_57, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_59 = tensorRelu(var_58); 
-void* var_60 = ConvLayer_PROMISE(var_59, 0.0, 5.221003110408843, conv2d_11_w, -0.11900172759592534, 0.12536374783515936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.038203780174255, 4.004009407043483, 9); 
-void* var_61 = tensorBatchNorm(var_60, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_62 = tensorRelu(var_61); 
-void* var_63 = tensorConvolution(var_62, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_65 = tensorRelu(var_64); 
-void* var_66 = ConvLayer_PROMISE(var_65, 0.0, 5.732498347759442, conv2d_12_w, -0.10839721685647964, 0.11625668607652187, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3111015114784244, 4.462933233261136, 9); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_69 = tensorConvolution(var_68, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-void* var_70 = tensorBatchNorm(var_69, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_71 = tensorRelu(var_70); 
-void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 7.240498211860681, conv2d_13_w, -0.08623744961619377, 0.08859449951350662, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.175431394577027, 6.2043294754027345, 9); 
-void* var_73 = tensorBatchNorm(var_72, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_74 = tensorRelu(var_73); 
-void* var_75 = tensorConvolution(var_74, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-void* var_76 = tensorBatchNorm(var_75, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_77 = tensorRelu(var_76); 
-void* var_78 = ConvLayer_PROMISE(var_77, 0.0, 7.813958834648251, conv2d_14_w, -0.06813025139272214, 0.07002027779817581, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.920566423416137, 2.6442912578582534, 9); 
-void* var_79 = tensorBatchNorm(var_78, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_80 = tensorRelu(var_79); 
-void* var_81 = tensorPooling(var_80,1,2,2,0,0,2,2); 
-void* var_82 = FCLayer_PROMISE(var_81, 0.0, 2.8692066650391013, dense_1_w, -0.22301019695401192, 0.1442659378200768, dense_1_b, -0.1654396, 0.23336112, -1, -12.245949958801269, 23.80532513427739, 9); 
-void* var_83 = tensorSoftmax(var_82); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_83); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
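Every ConvLayer_PROMISE call in the hunk above repeats the same 19-slot positional pattern: the input with its (min, max) quantization range, the weights with theirs, an optional bias with its range (NULL, 0, 0 when absent), padding and stride pairs, three codes that are -1, 0, -1 in every bias-free call here (read as fused-pooling/activation selectors, though that is an inference), the output range, and a trailing level of 9. The depthwise stages instead go through tensorConvolution(..., 1, 1, s, s, 1, C), whose final argument matches the channel count. A minimal sketch of a wrapper that names those slots, assuming the tensor_runtime.h declaration matches these call sites and that the slot meanings are as inferred:

    // Sketch only: slot meanings are inferred from the call sites above,
    // not from a documented signature. Assumes tensor_runtime.h declares
    // ConvLayer_PROMISE with these 19 positional arguments.
    #include "tensor_runtime.h"

    struct QuantRange { float min, max; };   // hypothetical helper type

    static void* promiseConv(void* input, QuantRange in,
                             void* w, QuantRange wr,
                             int pad, int stride,
                             QuantRange out, int swing) {
      // NULL bias with a 0..0 range, plus the -1, 0, -1 codes that every
      // bias-free call above passes (read here as "no fused pooling, no
      // fused activation" -- an assumption).
      return ConvLayer_PROMISE(input, in.min, in.max, w, wr.min, wr.max,
                               NULL, 0, 0, pad, pad, stride, stride,
                               -1, 0, -1, out.min, out.max, swing);
    }

Under those assumptions, the var_36 call above would read as promiseConv(var_35, {0.0f, 5.830f}, conv2d_7_w, {-0.2023f, 0.1900f}, 0, 1, {-6.298f, 4.848f}, 9).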
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_promise.cc
deleted file mode 100644
index 7a8136d7d3f66e971f010f17bda1b78dde8ee181..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_promise.cc
+++ /dev/null
@@ -1,270 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000; 
-  int batch_size = 1000;
-  int offset = 5000;
-
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-  
-  llvm_hpvm_initTensorRt(1); 
-
-  int missed = 0;
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    if (missed >= to_skip){
-     break;           
-    }
-
-    startMemTracking(); 
-    
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/mobilenet_shallow/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-      std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-      void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-      void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-      void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-      void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-      void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-      void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-      void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-      void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-      void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-      std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-      void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-      void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-      void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-      void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-      void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-      void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-      void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-      void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-      void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-      std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-      void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-      void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-      void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-      void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-      void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-      void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-      void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-      void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-      void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-      std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-      void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-      void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-      void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-      void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-      void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-      void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-      void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-      void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-      void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-      std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-      void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-      void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-      void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-      void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-      void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-      void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-      void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-      void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-      void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-      std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-      void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-      void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-      void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-      void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -1.5164621164798737, 1.6472081774473288, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -9.868980642318725, 10.560956018447879, 9); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 6.821381127357554, conv2d_2_w, -1.1834390873908995, 1.2731596627235617, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.875998497009277, 7.51305247974393, 9); 
-      void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_11 = tensorRelu(var_10); 
-      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.826067455768602, conv2d_3_w, -0.599876856982708, 0.6812073457241064, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.633289833068848, 5.177892235755925, 9); 
-      void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.02646304416659, conv2d_4_w, -0.4555967862010002, 0.4942613914608956, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.316803941726685, 4.605850250244146, 9); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 4.532649063110355, conv2d_5_w, -0.35657615590095515, 0.3382165088057521, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.1012511816024775, 4.3630500688553, 9); 
-      void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_29 = tensorRelu(var_28); 
-      void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 3.9874704387188977, conv2d_6_w, -0.28502783328294756, 0.28604640334844594, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.243851703643799, 3.486250406742097, 9); 
-      void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_32 = tensorRelu(var_31); 
-      void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 6.563065901756522, conv2d_7_w, -0.18946402323246003, 0.19012390717864017, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.938115713119507, 3.538363476753238, 9); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-      void* var_40 = FCLayer_PROMISE(var_39, 0.0, 1.8908388000727185, dense_1_w, -0.35140394401550296, 0.422872786462307, dense_1_b, -0.23878151, 0.26507422, -1, -14.630816223144532, 27.27252123260504, 9); 
-      void* var_41 = tensorSoftmax(var_40); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-    if (final_accuracy < bench_acc)
-      missed += 1;
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
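The control flow of mobilenet_shallow_promise.cc above is an early-exit benchmarking loop: it runs at most total_runs full evaluations, counts a miss whenever the averaged accuracy falls below bench_acc, and gives up once to_skip misses accumulate. A minimal sketch of that loop, where runAllBatches() is hypothetical shorthand for the per-batch weight loading, inference, and computeAccuracy2() body that the deleted file inlines:

    // Assumes the same tensor_runtime.h entry points the deleted sources
    // include (dumpFinalAccuracy, dumpExecutionAccuracies).
    #include "tensor_runtime.h"

    float runAllBatches();   // hypothetical: returns accuracy averaged
                             // over all batches of one full run

    int benchmarkLoop(int total_runs, float bench_acc, int to_skip) {
      int missed = 0;
      for (int run = 0; run < total_runs; run++) {
        if (missed >= to_skip) break;            // stop after too many misses
        float final_accuracy = runAllBatches();  // mean accuracy of this run
        dumpFinalAccuracy(final_accuracy);
        if (final_accuracy < bench_acc)
          missed += 1;                           // run fell below the target
      }
      dumpExecutionAccuracies();
      return missed;
    }

Note that the deleted source reuses i for both the run loop and the inner batch loop; that works only because the inner declaration shadows the outer one, which is why the sketch names the outer index run.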
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_quant.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_quant.cc
deleted file mode 100644
index c3f11e1f2ff7f0a255d40ecd5916fbdada2b0be3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_quant.cc
+++ /dev/null
@@ -1,210 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int total_runs = 1;
- 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 10000; 
-    int batch_size = 2000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-      std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-      void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-      void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-      void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-      void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-      void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-      void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-      void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-      void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-      void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,1,1); 
-      std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-      void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-      void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-      void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-      void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-      void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-      void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-      void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-      void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-      void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,64,1,1); 
-      std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-      void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-      void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-      void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-      void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-      void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-      void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-      void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-      void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-      void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-      std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-      void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-      void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-      void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-      void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-      void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-      void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-      void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-      void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-      void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-      std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-      void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-      void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-      void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-      void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = i * batch_size; 
-      int end = (i + 1) * batch_size; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -1.340709443449974, 1.3555025291442875, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -7.2273098745346065, 8.197232282638556, 9); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 6.053754347324407, conv2d_2_w, -1.1412922372817993, 0.9433415410518639, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.87497807598114, 5.3558874282836655, 9); 
-      void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_11 = tensorRelu(var_10); 
-      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.144686742782682, conv2d_3_w, -0.6453772538900375, 0.6694499132037164, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.349411018371582, 4.33332164001466, 9); 
-      void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 2, 2, 1, 64); 
-      void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.248231422424324, conv2d_4_w, -0.44596208560466766, 0.49276923143864204, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3628717079162596, 3.058014160633088, 9); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.569213481903169, conv2d_5_w, -0.3239764194488525, 0.2983359285593033, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.473401127815246, 4.425663429260224, 9); 
-      void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_29 = tensorRelu(var_28); 
-      void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 3.728998669862753, conv2d_6_w, -0.2290773878991604, 0.22830345794558554, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.1163714165687564, 2.065946404457088, 9); 
-      void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_32 = tensorRelu(var_31); 
-      void* var_33 = tensorPooling(var_32,1,2,2,0,0,2,2); 
-      void* var_34 = FCLayer_PROMISE(var_33, 0.0, 2.191649281263443, dense_1_w, -0.3627079802751541, 0.3849146918058397, dense_1_b, -0.37858343, 0.377391, -1, -11.424064125061035, 18.695249080657973, 9); 
-      void* var_35 = tensorSoftmax(var_34); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_35); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
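Each parameter in the hunks above costs two lines: build the path, then call readTrainedWeights(path, 0, n, c, h, w). A sketch of a loader that collapses the pair; loadParam() is hypothetical, while the readTrainedWeights() argument order is copied from these call sites (the second argument is 0 throughout and its meaning is not confirmed here):

    #include <string>
    #include "tensor_runtime.h"   // assumed to declare readTrainedWeights()

    // Hypothetical helper; dims follow the (n, c, h, w) order used above.
    static void* loadParam(const std::string& dir_prefix, const char* name,
                           int n, int c, int h, int w) {
      std::string path = dir_prefix + name;   // e.g. dir_prefix + "conv2d_1_w.bin"
      return readTrainedWeights(path.c_str(), 0, n, c, h, w);
    }

    // Usage, matching the first loads above:
    //   void* conv2d_1_w = loadParam(dir_prefix, "conv2d_1_w.bin", 32, 3, 3, 3);
    //   void* bn_1_gamma = loadParam(dir_prefix,
    //                                "batch_normalization_1_gamma.bin", 1, 32, 1, 1);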
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_valid.cc
deleted file mode 100644
index ba2a14d990a7b7d3d1cc6ad4bc818b2c199a0c6b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_shallow_valid.cc
+++ /dev/null
@@ -1,235 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  int total_runs = 40; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 4000; 
-    int batch_size = 2000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../../keras/data/mobilenet_shallow_nathan/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-      std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-      void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-      void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-      void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-      void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-      void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-      void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-      void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-      void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-      void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-      std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-      void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-      void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-      void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-      void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-      void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-      void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-      void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-      void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-      void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-      std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-      void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-      void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-      void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-      void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-      void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-      void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-      void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-      void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-      void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-      std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-      void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-      void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-      void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-      void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-      void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-      void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-      void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-      void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-      void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-      std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-      void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-      void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-      void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-      void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-      void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-      void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-      void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-      void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-      void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-      std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-      void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-      void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-      void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-      void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = (i * batch_size) + 4000; 
-      int end = ((i + 1) * batch_size) + 4000; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -1.5164621164798737, 1.6472081774473288, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -9.868980642318725, 10.560956018447879, 9); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 6.821381127357554, conv2d_2_w, -1.1834390873908995, 1.2731596627235617, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.875998497009277, 7.51305247974393, 9); 
-      void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_11 = tensorRelu(var_10); 
-      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.826067455768602, conv2d_3_w, -0.599876856982708, 0.6812073457241064, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.633289833068848, 5.177892235755925, 9); 
-      void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.02646304416659, conv2d_4_w, -0.4555967862010002, 0.4942613914608956, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.316803941726685, 4.605850250244146, 9); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 4.532649063110355, conv2d_5_w, -0.35657615590095515, 0.3382165088057521, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.1012511816024775, 4.3630500688553, 9); 
-      void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_29 = tensorRelu(var_28); 
-      void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 3.9874704387188977, conv2d_6_w, -0.28502783328294756, 0.28604640334844594, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.243851703643799, 3.486250406742097, 9); 
-      void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_32 = tensorRelu(var_31); 
-      void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 6.563065901756522, conv2d_7_w, -0.18946402323246003, 0.19012390717864017, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.938115713119507, 3.538363476753238, 9); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-      void* var_40 = FCLayer_PROMISE(var_39, 0.0, 1.8908388000727185, dense_1_w, -0.35140394401550296, 0.422872786462307, dense_1_b, -0.23878151, 0.26507422, -1, -14.630816223144532, 27.27252123260504, 9); 
-      void* var_41 = tensorSoftmax(var_40); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
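
Note on the batch indexing in the generated validation harnesses above and below: each batch is read with start = i * batch_size + 4000, so the constant offset skips the first 4000 images, which these benchmarks appear to reserve for calibration; validation batches are drawn from the images after that slice. (The inner batch loop shadows the outer run loop's i; the window uses the inner, per-batch index.) A minimal sketch of the arithmetic, assuming readInputBatch treats (start, end) as a half-open image-index range into the packed NCHW input file:

#include <utility>

// Computes the [start, end) image window for one validation batch.
// The offset (4000 in these harnesses) skips the calibration images
// stored at the front of input.bin.
std::pair<int, int> batchWindow(int batch_index, int batch_size, int offset) {
    int start = batch_index * batch_size + offset;
    int end   = (batch_index + 1) * batch_size + offset;
    return {start, end};
}

With test_input_size = 4000 and batch_size = 2000, batch_count is 2 and the windows come out as [4000, 6000) and [6000, 8000).
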
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_valid.cc
deleted file mode 100644
index 8cf22349346230889f9a4f1385b94ceabe04612e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/mobilenet_valid.cc
+++ /dev/null
@@ -1,418 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int total_runs = 40; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 4000; 
-    int batch_size = 2000; 
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-
-      std::string dir_prefix = std::string("../../keras/data/mobilenet_quant/"); 
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-      std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-      void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-      void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-      void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-      void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-      std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-      void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-      std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-      void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-      void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-      void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-      std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-      void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-      std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-      void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-      void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-      void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-      void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-      std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-      void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-      std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-      void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-      void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-      void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-      std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-      void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-      std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-      void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-      void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-      void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-      void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-      void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-      void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-      void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-      void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-      void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-      std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-      void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-      void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-      void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-      void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-      std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-      void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-      std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-      void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-      void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-      void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-      std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-      void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-      std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-      void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-      void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-      void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-      void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-      void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-      void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-      void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-      void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-      void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-      std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-      void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-      void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-      void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-      void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-      std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-      void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-      std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-      void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-      void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-      void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-      std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-      void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-      std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-      void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-      void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-      void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-      void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-      void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-      void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-      void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-      void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-      void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-      void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-      void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-      void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-      void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-      void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-      void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-      void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-      void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-      void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-      void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-      void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-      void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-      void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-      void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-      void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-      void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-      void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-      void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-      void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-      void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-      void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-      void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-      void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-      void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-      void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-      void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-      void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-      void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-      void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-      void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-      void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-      void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-      void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-      void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-      void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-      void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-      void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-      void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-      void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-      void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-      void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-      std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-      void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-      void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-      void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-      void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-      std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-      void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-      std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-      void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-      void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-      void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-      std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-      void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-      std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-      void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-      std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-      void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-      void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-      void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-      void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-      void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-      std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-      void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-      void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-      void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-      void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-      void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-      std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-      void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-      void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-      void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-      std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-      void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-      int start = (i * batch_size) + 4000; 
-      int end = ((i + 1) * batch_size) + 4000; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -2.196306920051575, 1.347581704139706, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -60.89275047302246, 51.99256916046146, 9); 
-      void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-      void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-      void* var_5 = tensorRelu(var_4); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.713541553974245, conv2d_2_w, -0.9317721160650253, 1.0774258937835774, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.518589503288269, 6.810842518806449, 9); 
-      void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-      void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-      void* var_11 = tensorRelu(var_10); 
-      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.932139402866376, conv2d_3_w, -0.5316544661521911, 0.5753790403604531, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.482631235122681, 3.96730119752885, 9); 
-      void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-      void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.103263397693674, conv2d_4_w, -0.36234098821878435, 0.4076913900375366, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.04261828327179, 3.88677932929993, 9); 
-      void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-      void* var_20 = tensorRelu(var_19); 
-      void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-      void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-      void* var_23 = tensorRelu(var_22); 
-      void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.383221302509475, conv2d_5_w, -0.3131200549006462, 0.29357679939270065, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.921469215393066, 4.338679324150087, 9); 
-      void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-      void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-      void* var_29 = tensorRelu(var_28); 
-      void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 4.316738154411368, conv2d_6_w, -0.23299247801303866, 0.2580290257930756, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.207789947509766, 3.932436970710759, 9); 
-      void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-      void* var_32 = tensorRelu(var_31); 
-      void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-      void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 5.830408106803901, conv2d_7_w, -0.20233777219057084, 0.18998308175802117, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.298286915779113, 4.848135117530843, 9); 
-      void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorConvolution(var_38, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-      void* var_40 = tensorBatchNorm(var_39, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-      void* var_41 = tensorRelu(var_40); 
-      void* var_42 = ConvLayer_PROMISE(var_41, 0.0, 4.446417809963227, conv2d_8_w, -0.17442735651135444, 0.17695830866694454, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.347910885810852, 3.6144364695549145, 9); 
-      void* var_43 = tensorBatchNorm(var_42, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-      void* var_44 = tensorRelu(var_43); 
-      void* var_45 = tensorConvolution(var_44, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-      void* var_46 = tensorBatchNorm(var_45, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-      void* var_47 = tensorRelu(var_46); 
-      void* var_48 = ConvLayer_PROMISE(var_47, 0.0, 4.518095604896667, conv2d_9_w, -0.14546796187758446, 0.15256431668996823, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.0287702755928043, 2.9487365779876953, 9); 
-      void* var_49 = tensorBatchNorm(var_48, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-      void* var_50 = tensorRelu(var_49); 
-      void* var_51 = tensorConvolution(var_50, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-      void* var_52 = tensorBatchNorm(var_51, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-      void* var_53 = tensorRelu(var_52); 
-      void* var_54 = ConvLayer_PROMISE(var_53, 0.0, 6.348575634956407, conv2d_10_w, -0.13025874522328376, 0.13558243343234128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.2293100805282595, 3.5315046372413645, 9); 
-      void* var_55 = tensorBatchNorm(var_54, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-      void* var_56 = tensorRelu(var_55); 
-      void* var_57 = tensorConvolution(var_56, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-      void* var_58 = tensorBatchNorm(var_57, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-      void* var_59 = tensorRelu(var_58); 
-      void* var_60 = ConvLayer_PROMISE(var_59, 0.0, 5.221003110408843, conv2d_11_w, -0.11900172759592534, 0.12536374783515936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.038203780174255, 4.004009407043483, 9); 
-      void* var_61 = tensorBatchNorm(var_60, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-      void* var_62 = tensorRelu(var_61); 
-      void* var_63 = tensorConvolution(var_62, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-      void* var_64 = tensorBatchNorm(var_63, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-      void* var_65 = tensorRelu(var_64); 
-      void* var_66 = ConvLayer_PROMISE(var_65, 0.0, 5.732498347759442, conv2d_12_w, -0.10839721685647964, 0.11625668607652187, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3111015114784244, 4.462933233261136, 9); 
-      void* var_67 = tensorBatchNorm(var_66, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-      void* var_68 = tensorRelu(var_67); 
-      void* var_69 = tensorConvolution(var_68, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-      void* var_70 = tensorBatchNorm(var_69, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-      void* var_71 = tensorRelu(var_70); 
-      void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 7.240498211860681, conv2d_13_w, -0.08623744961619377, 0.08859449951350662, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.175431394577027, 6.2043294754027345, 9); 
-      void* var_73 = tensorBatchNorm(var_72, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-      void* var_74 = tensorRelu(var_73); 
-      void* var_75 = tensorConvolution(var_74, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-      void* var_76 = tensorBatchNorm(var_75, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-      void* var_77 = tensorRelu(var_76); 
-      void* var_78 = ConvLayer_PROMISE(var_77, 0.0, 7.813958834648251, conv2d_14_w, -0.06813025139272214, 0.07002027779817581, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.920566423416137, 2.6442912578582534, 9); 
-      void* var_79 = tensorBatchNorm(var_78, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-      void* var_80 = tensorRelu(var_79); 
-      void* var_81 = tensorPooling(var_80,1,2,2,0,0,2,2); 
-      void* var_82 = FCLayer_PROMISE(var_81, 0.0, 2.8692066650391013, dense_1_w, -0.22301019695401192, 0.1442659378200768, dense_1_b, -0.1654396, 0.23336112, -1, -12.245949958801269, 23.80532513427739, 9); 
-      void* var_83 = tensorSoftmax(var_82); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_83); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
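
For reference, the ConvLayer_PROMISE argument layout can be read off the generated calls. The annotation below reproduces the first call from mobilenet_valid.cc above with an inferred role for each position; the roles are deduced from the generated code and from the inline comments in the pipeline benchmarks that follow, not from a documented signature, so treat them as a best guess:

// Annotated copy of the first ConvLayer_PROMISE call above;
// the comments are inferred, not authoritative.
void* var_0 = ConvLayer_PROMISE(
    input,                                  // input tensor
    -1.9892114, 2.126797,                   // calibrated input min/max
    conv2d_1_w,                             // weight tensor
    -2.196306920051575, 1.347581704139706,  // calibrated weight min/max
    NULL, 0, 0,                             // bias tensor and its min/max (unused here)
    1, 1, 1, 1,                             // padding (h, w) and stride (h, w)
    -1, 0,                                  // pooling type and size (-1 = no pooling)
    -1,                                     // fused activation id (-1 = none)
    -60.89275047302246, 51.99256916046146,  // calibrated output min/max
    9);                                     // per-layer approximation knob; these runs pass 9 throughout

The min/max pairs are presumably the ranges recorded when each model was quantized, which is why they differ between the calibration and validation variants of the same network.
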
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_promise.cc
deleted file mode 100644
index f696bbf259b26eb4e45b73aa05658f3208c6fae6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_promise.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Motion Blur - Outline ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEMO_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100] = {0};  // zero-filled: read() does not NUL-terminate
-      read(fd, str, sizeof(str) - 1);
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // no pooling needed
-                                           2,
-                                           0, 255, // output min/max: assumed to be 0-255 for all filters;
-                                                   // the golden output must be regenerated if this changes
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // no pooling needed
-                                           2,
-                                           0, 255, // output min/max: assumed to be 0-255 for all filters;
-                                                   // the golden output must be regenerated if this changes
-                                           9);
-
-    void* motionblur_out = ConvLayer_PROMISE(emboss_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(motionblur_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    //dumpOutput(result);
-
-    computePSNRViolation(golden_output, result, PSNR);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
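
The Opentuner_run blocks in the file above implement a blocking handshake over the named pipe /tmp/myfifo: before an iteration the benchmark waits for the tuner to write a command (aborting on "stop_run"), and afterwards it writes a completion message back. Below is a minimal sketch of that protocol, assuming the tuner has already created the FIFO with mkfifo; the function names are illustrative, and unlike the original code the read is null-terminated before comparison and the write sends only the message bytes:

    #include <fcntl.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/types.h>
    #include <unistd.h>

    // Benchmark side of the FIFO handshake. open(O_RDONLY) blocks until the
    // tuner opens the pipe for writing, which is what synchronizes the two.
    static bool wait_for_tuner(const char* fifo_path) {
      int fd = open(fifo_path, O_RDONLY);
      if (fd == -1 || fcntl(fd, F_GETFD) == -1) {
        printf("Invalid descriptor \n");
        abort();
      }
      char cmd[100];
      ssize_t n = read(fd, cmd, sizeof(cmd) - 1);
      close(fd);
      if (n < 0) n = 0;
      cmd[n] = '\0';                        // terminate before strcmp
      return strcmp(cmd, "stop_run") != 0;  // false: tuner requested shutdown
    }

    // Reports one finished iteration back to the tuner.
    static void report_completion(const char* fifo_path) {
      int fd = open(fifo_path, O_WRONLY);
      if (fd == -1 || fcntl(fd, F_GETFD) == -1) {
        printf("Invalid descriptor \n");
        abort();
      }
      const char* msg = "completed***!\n";
      write(fd, msg, strlen(msg) + 1);
      close(fd);
    }

Each benchmark effectively wraps its loop body as: if the wait returns false, abort; otherwise run the pipeline and call report_completion.
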
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_valid.cc
deleted file mode 100644
index a8d049f7aca85fbc00e7bcd2b47c083d4f6ea377..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEMO_valid.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Motion Blur - Outline ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEMO_valid.bin",
-					   float_type,
-					   test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/test_4573.bin",
-				     float_type,
-				     test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* motionblur_out = ConvLayer_PROMISE(emboss_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(motionblur_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    //dumpOutput(result, "GEMO_valid.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-    
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
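
ConvLayer_PROMISE takes 19 positional arguments, which makes the call sites above hard to audit. The annotated rewrite of the Gaussian stage below names each position as inferred from the in-source comments; the names are descriptive guesses, not the declared signature in tensor_runtime.h:

    // Gaussian stage from the file above, one argument group per line
    // (names are inferred; the values are verbatim from the deleted source).
    void* gaussian_out = ConvLayer_PROMISE(
        input,            // input activations
        0, 255,           // input min / max (8-bit pixel range)
        gaussian_filter,  // 9x9 convolution filter
        0, 1,             // filter min / max
        NULL, 0, 0,       // bias tensor and bias min / max (no bias here)
        4, 4,             // padding (h, w): (9 - 1) / 2 = 4 preserves the size
        1, 1,             // stride (h, w)
        0, 0,             // pooling type / pooling size: no pooling
        2,                // activation id (presumably the 0-255 clamp)
        0, 255,           // output min / max
        9);               // PROMISE swing level; 9 appears to be the most exact
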
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_promise.cc
deleted file mode 100644
index 26ab88f81a70e032a723537864fb5eb4fead6a5b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_promise.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Outline - Motion Blur ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEOM_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* outline_out = ConvLayer_PROMISE(emboss_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(outline_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-
-    computePSNRViolation(golden_output, result, PSNR);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
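
Every variant scores its approximate output against a golden FP32 result via computePSNRViolation(golden_output, result, PSNR) with a 30 dB floor. As a reference for what such a check computes, here is a minimal per-image sketch, assuming PSNR = 10 * log10(MAX^2 / MSE) with MAX = 255; the real utility lives in dnn_sources/include/utils.h and may differ in detail:

    #include <cmath>
    #include <cstddef>
    #include <cstdio>

    // Fraction of images whose PSNR against the golden output falls below
    // threshold_db. Both buffers hold batch contiguous H x W float images.
    float psnrViolationRate(const float* golden, const float* out,
                            int batch, int H, int W, float threshold_db) {
      int violations = 0;
      for (int i = 0; i < batch; i++) {
        const float* g = golden + (size_t)i * H * W;
        const float* o = out + (size_t)i * H * W;
        double mse = 0.0;
        for (int p = 0; p < H * W; p++) {
          double d = (double)g[p] - (double)o[p];
          mse += d * d;
        }
        mse /= (double)(H * W);
        if (mse == 0.0) continue;  // identical image: infinite PSNR, no violation
        double psnr = 10.0 * log10((255.0 * 255.0) / mse);
        if (psnr < threshold_db) violations++;
      }
      float rate = (float)violations / (float)batch;
      printf("PSNR violation rate: %f \n", rate);
      return rate;
    }
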
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_valid.cc
deleted file mode 100644
index 1fd996f701664358d408f3b7e7a095a66c78f9ef..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEOM_valid.cc
+++ /dev/null
@@ -1,148 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Outline - Motion Blur ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEOM_valid.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/test_4573.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* outline_out = ConvLayer_PROMISE(emboss_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(outline_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    //dumpOutput(result, "GEOM_valid.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-
-    freeOutputTensors();  
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
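
All stages in these pipelines run at stride 1 with 'same'-style padding, so the padding pairs in the ConvLayer_PROMISE calls follow mechanically from pad = (k - 1) / 2 for a k x k filter, which is easy to verify once:

    // 'Same' output at stride 1: pad = (k - 1) / 2.
    static_assert((9 - 1) / 2 == 4, "Gaussian / motion blur (9x9) -> pad 4, 4");
    static_assert((5 - 1) / 2 == 2, "emboss (5x5) -> pad 2, 2");
    static_assert((3 - 1) / 2 == 1, "outline / sharpen (3x3) -> pad 1, 1");
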
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_promise.cc
deleted file mode 100644
index 577145a01eb3e5e941588fbfddf153c78156dc0c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_promise.cc
+++ /dev/null
@@ -1,139 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Outline ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEO_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(emboss_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result, "GEO_approx.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-    freeOutputTensors();
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_valid.cc
deleted file mode 100644
index 6f269a8ad51734e372ce14dd5c5b94342417e2b2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GEO_valid.cc
+++ /dev/null
@@ -1,143 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Emboss - Outline ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GEO_valid.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/test_4573.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    // NOTE: Filter descriptors do NOT have batch size
-    // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-    // IMP: The output channel count matches the trained model - not the LeNet arch proposed in Andrew Ng's class
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* emboss_out = ConvLayer_PROMISE(gaussian_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(emboss_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result, "GEO_40_psnr.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-    freeOutputTensors();
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
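
The NOTE comments in the file above pin down the dimension conventions of readTrainedWeights: activations carry a batch dimension, filters do not. Spelled out, with dimension names that are mine rather than the runtime's:

    // Activations: (N, C, H, W) -- batch, channels, height, width.
    void* input = readTrainedWeights(
        "../model_params/pipeline/dataset/test_4573.bin",
        float_type, 1000, 1, 240, 300);

    // Filters: (K, C, R, S) -- output channels, input channels (must equal
    // the activation C), kernel height, kernel width. No batch dimension.
    void* gaussian_filter = readTrainedWeights(
        "../model_params/pipeline/filters/GaussianFilter.bin",
        float_type, 1, 1, 9, 9);
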
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_promise.cc
deleted file mode 100644
index d6150a9ee4b5d99eaa736a911e0fc9da2d593150..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_promise.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur - Emboss ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSME_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-				     float_type,
-				     test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* sharpen_out = ConvLayer_PROMISE(gaussian_out, 0, 255, sharpen_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* motionblur_out = ConvLayer_PROMISE(sharpen_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(motionblur_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    hpvm_request_tensor(result, 0);
-    //dumpOutput(result);
-
-    computePSNRViolation(golden_output, result, PSNR);
-    freeOutputTensors();
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_valid.cc
deleted file mode 100644
index 8c2a1b9f63d41654bf6425a5670cb41fd64b64cf..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSME_valid.cc
+++ /dev/null
@@ -1,146 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur - Emboss ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSME_valid.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/test_4573.bin",
-				     float_type,
-				     test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* sharpen_out = ConvLayer_PROMISE(gaussian_out, 0, 255, sharpen_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* motionblur_out = ConvLayer_PROMISE(sharpen_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(motionblur_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result, "GSME_valid_20db.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-    freeOutputTensors();
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
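
Structurally, all of these benchmarks share one loop skeleton; the main thing that varies is whether freeOutputTensors() is called inside the loop. The GEO, GSME, and GEOM_valid variants free per-iteration outputs, while the GEMO and GSM variants do not, which matters once Opentuner_run drives total_runs to 1000000. A sketch of the intended shape, assuming freeOutputTensors releases the tensors produced since the last call:

    clearTensorMap();                      // reset the tensor registry once
    for (int i = 0; i < total_runs; i++) {
      // ... load input and filters, run the ConvLayer_PROMISE chain ...
      hpvm_request_tensor(result, 0);      // bring the result back to the host
      computePSNRViolation(golden_output, result, PSNR);
      freeOutputTensors();                 // inside the loop, or iterations accumulate
    }
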
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_promise.cc
deleted file mode 100644
index 59077e94a918a8d5540b713c08af5eb6e73cb86f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_promise.cc
+++ /dev/null
@@ -1,138 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSM_calib.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/calibration_4572.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* sharpen_out = ConvLayer_PROMISE(gaussian_out, 0, 255, sharpen_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(sharpen_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result, "GSM_approx.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(1);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_valid.cc
deleted file mode 100644
index 45b38e82864b97be220eecbe91ce3d6bfdce6318..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_GSM_valid.cc
+++ /dev/null
@@ -1,141 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Sharpen - Motion Blur ********** \n");
-
-  int test_batch_size = 1000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/GSM_valid.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/test_4573.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    // NOTE: Filter descriptors do NOT have batch size
-    // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-    // IMP: The output channel count matches the trained model - not the LeNet arch proposed in Andrew Ng's class
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("promise_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* sharpen_out = ConvLayer_PROMISE(gaussian_out, 0, 255, sharpen_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-    void* result = ConvLayer_PROMISE(sharpen_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // pool? no pooling needed
-                                           2,
-                                           0, 255, // out min max? should we assume 0 - 255 for all filters.
-                                                   // Will have to rerun to generate golden output
-                                           9);
-
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result, "GSM_valid_30db.bin");
-
-    computePSNRViolation(golden_output, result, PSNR);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n\0";
-      write(fd_out, str, strlen(str) + 1); // send only the message bytes, not a fixed 80
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_promise.cc
deleted file mode 100644
index 7dd70134731c4fc13d6b7ca239f0566942c02885..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/pipeline_promise.cc
+++ /dev/null
@@ -1,149 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <string.h>
-
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-bool Opentuner_run = false;
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testPipeline(){
-
-  int total_runs = 1;
-  if(Opentuner_run){
-    total_runs = 1000000;
-  }
-
-  printf("********* Pipeline: Gaussian - Outline - Motion Blur - Emboss ********** \n");
-
-  int test_batch_size = 2000;
-  int H = 240;
-  int W = 300;
-  float PSNR = 30;
-
-  void* golden_output = readTrainedWeights("../model_params/pipeline/golden_output/caltech-G-O-M-E-FP32-clipped-2000.bin",
-                                        float_type,
-                                        test_batch_size, 1, H, W);
-
-  clearTensorMap();
-  for(int i = 0; i < total_runs; i++){
-    void* input = readTrainedWeights("../model_params/pipeline/dataset/caltech101_255_float32.bin",
-                                          float_type,
-                                          test_batch_size, 1, H, W);
-
-    // NOTE: Filter descriptors do NOT have batch size
-    // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-    // IMP: The output channel count matches the trained model - not the LeNet arch proposed in Andrew Ng's class
-    void* gaussian_filter = readTrainedWeights("../model_params/pipeline/filters/GaussianFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* outline_filter = readTrainedWeights("../model_params/pipeline/filters/OutlineFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* sharpen_filter = readTrainedWeights("../model_params/pipeline/filters/SharpenFilter.bin",
-                                            float_type, 1, 1, 3, 3);
-    void* motionblur_filter = readTrainedWeights("../model_params/pipeline/filters/MotionblurFilter.bin",
-                                            float_type, 1, 1, 9, 9);
-    void* emboss_filter = readTrainedWeights("../model_params/pipeline/filters/EmbossFilter.bin",
-                                            float_type, 1, 1, 5, 5);
-    void* emboss_bias = readTrainedWeights("../model_params/pipeline/filters/EmbossBias.bin",
-                                            float_type, 1, 1, 1, 1);
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo"; // string literals are const in C++11
-      int fd = open(myfifo, O_RDONLY);
-
-      int ret_val = fcntl(fd, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-
-      char str[100];
-      ssize_t bytes_read = read(fd, str, sizeof(str) - 1);
-      if(bytes_read < 0) bytes_read = 0;
-      str[bytes_read] = '\0'; // read() does not null-terminate; terminate before strcmp
-      if(strcmp(str, "stop_run") == 0){
-        abort();
-      }
-
-      close(fd);
-    }
-
-    readOpenTunerFlags("opentuner_flags"); // Resets the OpenTuner counters
-
-
-    void* gaussian_out = ConvLayer_PROMISE(input, 0, 255, gaussian_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // no pooling
-                                           2,
-                                           0, 255, // output range assumed to be [0, 255] for all filters;
-                                                   // regenerate the golden output if this assumption changes
-                                           9);
-    printf("Gaussian done\n");
-    void* outline_out = ConvLayer_PROMISE(gaussian_out, 0, 255, outline_filter, -1, 8, NULL, 0, 0,
-                                           1, 1, 1, 1,
-                                           0, 0, // no pooling
-                                           2,
-                                           0, 255, // output range assumed to be [0, 255] for all filters;
-                                                   // regenerate the golden output if this assumption changes
-                                           9);
-
-    void* motionblur_out = ConvLayer_PROMISE(outline_out, 0, 255, motionblur_filter, 0, 1, NULL, 0, 0,
-                                           4, 4, 1, 1,
-                                           0, 0, // no pooling
-                                           2,
-                                           0, 255, // output range assumed to be [0, 255] for all filters;
-                                                   // regenerate the golden output if this assumption changes
-                                           9);
-
-    void* result = ConvLayer_PROMISE(motionblur_out, 0, 255, emboss_filter, -1, 1, emboss_bias, 128, 128,
-                                           2, 2, 1, 1,
-                                           0, 0, // no pooling
-                                           2,
-                                           0, 255, // output range assumed to be [0, 255] for all filters;
-                                                   // regenerate the golden output if this assumption changes
-                                           9);
-
-    hpvm_request_tensor(result, 0);
-    dumpOutput(result);
-
-    computePSNRViolation(golden_output, result, PSNR);
-    freeOutputTensors();
-
-    if(Opentuner_run){
-
-      const char* myfifo = "/tmp/myfifo";
-      int fd_out = open(myfifo, O_WRONLY);
-      int ret_val = fcntl(fd_out, F_GETFD);
-      if(ret_val == -1){
-        printf("Invalid descriptor \n");
-        abort();
-      }
-      const char* str = "completed***!\n";
-      write(fd_out, str, strlen(str) + 1); // write only the message, not 80 bytes past the literal
-      close(fd_out);
-    }
-  }
-}
-
-
-int main(int argc, char* argv[]){
-
-  if(argc > 1)
-    Opentuner_run = true;
-
-  llvm_hpvm_initTensorRt(0);
-
-  testPipeline();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
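Review note: the OpenTuner handshake in pipeline_promise.cc blocks on a named pipe at /tmp/myfifo, treats the message "stop_run" as an abort signal, and reports completion by writing a marker back. A minimal self-contained sketch of that handshake using only POSIX calls (the FIFO path and message strings mirror the deleted source; the function names and mkfifo setup are illustrative):

    #include <fcntl.h>
    #include <unistd.h>
    #include <sys/stat.h>
    #include <cstring>
    #include <cstdio>
    #include <cstdlib>

    // Block until the tuner writes a command; return false on "stop_run".
    static bool wait_for_tuner(const char* fifo_path) {
      mkfifo(fifo_path, 0666);              // no-op if the FIFO already exists
      int fd = open(fifo_path, O_RDONLY);   // blocks until a writer connects
      if (fd < 0) { perror("open"); abort(); }
      char buf[100];
      ssize_t n = read(fd, buf, sizeof(buf) - 1);
      close(fd);
      if (n < 0) n = 0;
      buf[n] = '\0';                        // read() does not null-terminate
      return strcmp(buf, "stop_run") != 0;
    }

    // Tell the tuner the current run has finished.
    static void signal_completion(const char* fifo_path) {
      int fd = open(fifo_path, O_WRONLY);
      if (fd < 0) { perror("open"); abort(); }
      const char* msg = "completed***!\n";
      write(fd, msg, strlen(msg) + 1);      // send only the message plus terminator
      close(fd);
    }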
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise.cc
deleted file mode 100644
index b229fc9c2b81703c2d29039480297192a0a3c746..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise.cc
+++ /dev/null
@@ -1,224 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;   // number of full evaluation runs
-float bench_acc = 0;  // accuracy threshold a run must reach
-int to_skip = 5;      // abort after this many runs fall below bench_acc
-
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000;  // images evaluated per run
-  int batch_size = 1000;       // images per batch
-  int offset = 5000;           // index of the first test image
-
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-  
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int missed = 0; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    if (missed >= to_skip){
-     break;           
-    }
-
-    startMemTracking(); 
-    
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/resnet18_cifar10_promise/");	   
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-      void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-      void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-      void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-      std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-      void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-      void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-      std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-      void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-      void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-      void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-      void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-      void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-      void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-      void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-      void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-      void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-      void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-      void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-      void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-      std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-      void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-      void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-      std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-      void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-      void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-      void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-      void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-      void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-      void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-      void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-      void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-      void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-      void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-      void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -0.5500815, 0.60786617, conv2d_1_w, -1.0248864, 1.2929907, conv2d_1_b, -0.36291853, 0.2533059, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.8791630274057383, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 0.8791630274057383, conv2d_2_w, -0.69884616, 0.71849966, conv2d_2_b, -0.2781147, 0.45571187, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1859495645761484, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 1.1859495645761484, conv2d_3_w, -0.59568167, 0.7714691, conv2d_3_b, -0.8602873, 0.19743633, 1, 1, 1, 1, -1, 0, -1, -2.2316832554340365, 2.266301159858699, 9); 
-      void* var_3 = tensorAdd(var_0, var_2); 
-      void* var_4 = tensorRelu(var_3); 
-      void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.789569139480591, conv2d_4_w, -0.41976976, 0.43748936, conv2d_4_b, -0.7021962, 0.3033103, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.3341254055499974, 9); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 1.3341254055499974, conv2d_5_w, -0.46757826, 0.4635873, conv2d_5_b, -0.20662616, 0.1778044, 1, 1, 1, 1, -1, 0, -1, -0.9912706619501114, 1.0245310074090952, 9); 
-      void* var_7 = tensorAdd(var_4, var_6); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 2.998989346027372, conv2d_6_w, -0.64404047, 0.45383143, conv2d_6_b, -0.819547, 0.38550296, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.2850778144597967, 9); 
-      void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 1.2850778144597967, conv2d_7_w, -0.41986948, 0.33654243, conv2d_7_b, -0.3563013, 0.22371122, 1, 1, 1, 1, -1, 0, -1, -1.2940701305866242, 0.7332147359848022, 9); 
-      void* var_11 = tensorAdd(var_8, var_10); 
-      void* var_12 = tensorRelu(var_11); 
-      void* var_13 = ConvLayer_PROMISE(var_12, 0.0, 2.8626382386684384, conv2d_8_w, -0.4805263, 0.50655717, conv2d_8_b, -0.296758, 0.7742441, 1, 1, 2, 2, -1, 0, 1, 0.0, 3.6232483506202584, 9); 
-      void* var_14 = ConvLayer_PROMISE(var_13, 0.0, 3.6232483506202584, conv2d_9_w, -0.52083415, 0.45517674, conv2d_9_b, -0.20242067, 0.8236838, 1, 1, 1, 1, -1, 0, -1, -6.319877154827118, 6.882811555862418, 9); 
-      void* var_15 = ConvLayer_PROMISE(var_12, 0.0, 2.8626382386684384, conv2d_10_w, -0.5338656, 1.3395424, conv2d_10_b, -0.20242067, 0.8236838, 0, 0, 2, 2, -1, 0, -1, -0.9930689406394959, 2.8721754658222096, 9); 
-      void* var_16 = tensorAdd(var_15, var_14); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 8.315858840942383, conv2d_11_w, -0.34429058, 0.43629733, conv2d_11_b, -1.0744808, 0.056708273, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.6893706333637226, 9); 
-      void* var_19 = ConvLayer_PROMISE(var_18, 0.0, 2.6893706333637226, conv2d_12_w, -0.30342352, 0.39493486, conv2d_12_b, -0.44630566, 0.6492069, 1, 1, 1, 1, -1, 0, -1, -1.8801953810453416, 1.714934362173068, 9); 
-      void* var_20 = tensorAdd(var_17, var_19); 
-      void* var_21 = tensorRelu(var_20); 
-      void* var_22 = ConvLayer_PROMISE(var_21, 0.0, 8.381670951843262, conv2d_13_w, -0.38351893, 0.45775774, conv2d_13_b, -1.4733055, -0.014426912, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.569231034517287, 9); 
-      void* var_23 = ConvLayer_PROMISE(var_22, 0.0, 2.569231034517287, conv2d_14_w, -0.25695276, 0.45372736, conv2d_14_b, -0.5259744, 0.26591402, 1, 1, 1, 1, -1, 0, -1, -1.9701244848966597, 1.4661400413513093, 9); 
-      void* var_24 = tensorAdd(var_21, var_23); 
-      void* var_25 = tensorRelu(var_24); 
-      void* var_26 = ConvLayer_PROMISE(var_25, 0.0, 8.188224797248836, conv2d_15_w, -0.55299705, 0.5443531, conv2d_15_b, -0.71790683, 1.2730768, 1, 1, 2, 2, -1, 0, 1, 0.0, 12.411911067962677, 9); 
-      void* var_27 = ConvLayer_PROMISE(var_26, 0.0, 12.411911067962677, conv2d_16_w, -0.4203967, 0.48641303, conv2d_16_b, -0.90653443, 1.3546854, 1, 1, 1, 1, -1, 0, -1, -25.407194147109987, 20.519153985977383, 9); 
-      void* var_28 = ConvLayer_PROMISE(var_25, 0.0, 8.188224797248836, conv2d_17_w, -0.4365755, 0.84913826, conv2d_17_b, -0.90653443, 1.3546851, 0, 0, 2, 2, -1, 0, -1, -4.256520752906799, 5.730506427288059, 9); 
-      void* var_29 = tensorAdd(var_28, var_27); 
-      void* var_30 = tensorRelu(var_29); 
-      void* var_31 = ConvLayer_PROMISE(var_30, 0.0, 22.350475664138983, conv2d_18_w, -0.38657624, 0.5228989, conv2d_18_b, -1.2083547, 0.76361173, 1, 1, 1, 1, -1, 0, 1, 0.0, 23.93387042045599, 9); 
-      void* var_32 = ConvLayer_PROMISE(var_31, 0.0, 23.93387042045599, conv2d_19_w, -0.40857902, 0.575035, conv2d_19_b, -1.8731614, 1.0960501, 1, 1, 1, 1, -1, 0, -1, -35.37134181976318, 19.209569931030273, 9); 
-      void* var_33 = tensorAdd(var_30, var_32); 
-      void* var_34 = tensorRelu(var_33); 
-      void* var_35 = ConvLayer_PROMISE(var_34, 0.0, 29.434949998855657, conv2d_20_w, -0.33079496, 0.5893278, conv2d_20_b, -1.0234511, 1.0016295, 1, 1, 1, 1, -1, 0, 1, 0.0, 27.216757345199866, 9); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 27.216757345199866, conv2d_21_w, -0.27897888, 0.38280907, conv2d_21_b, -2.2086356, 1.0066502, 1, 1, 1, 1, -1, 0, -1, -42.31447326660156, 29.365212144852038, 9); 
-      void* var_37 = tensorAdd(var_34, var_36); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorPooling(var_38,1,8,8,0,0,8,8); 
-      void* var_40 = FCLayer_PROMISE(var_39, 0.0, 13.736315393447876, dense_1_w, -1.5092047, 1.0279838, dense_1_b, -0.49379802, 0.61032647, -1, -45.52749088287353, 31.64324799537669, 9); 
-      void* var_41 = tensorSoftmax(var_40); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-    if (final_accuracy < bench_acc)
-      missed += 1;
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
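Review note: every benchmark in this directory shares the same evaluation skeleton: split test_input_size images into equal batches, read the slice [start, end) for each batch, run the network, and average the per-batch accuracies. A condensed sketch of just that control flow, with the per-batch inference stubbed out (runBatch is a hypothetical stand-in, not a runtime API):

    // Hypothetical stand-in: load inputs [start, end), run the network,
    // and return the batch accuracy, as the loop body above does inline.
    float runBatch(int start, int end);

    float evaluate(int test_input_size, int batch_size, int offset) {
      int batch_count = test_input_size / batch_size; // remainder images are dropped
      float accuracy_sum = 0.0f;
      for (int b = 0; b < batch_count; b++) {
        int start = b * batch_size + offset;          // first image of this batch
        int end   = (b + 1) * batch_size + offset;    // one past the last image
        accuracy_sum += runBatch(start, end);
      }
      return accuracy_sum / batch_count;              // mean accuracy across batches
    }

Note that the division truncates: with test_input_size = 3000 and batch_size = 1000 every image is used, but any remainder smaller than one batch is silently skipped.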
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise_relu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise_relu.cc
deleted file mode 100644
index 7b7f989c16d9203778a602bc03b79a5d41c7a3ba..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_promise_relu.cc
+++ /dev/null
@@ -1,162 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0);
-
-std::string dir_prefix = std::string("../model_params/resnet18_cifar10_promise/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = ConvLayer_PROMISE(input, -0.5500815, 0.60786617, conv2d_1_w, -0.71850556, 0.79279953, conv2d_1_b, -0.2551266, 0.14472985, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.2546353, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 2.2546353, conv2d_2_w, -0.5433847, 0.5556715, conv2d_2_b, -0.19323121, 0.20603828, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.6603086, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 3.6603086, conv2d_3_w, -0.541787, 0.51889443, conv2d_3_b, -0.2030649, 0.21818772, 1, 1, 1, 1, -1, 0, -1, -5.471612, 5.295037, 9); 
-void* var_3 = tensorAdd(var_0, var_2); 
-void* var_4 = tensorRelu(var_3); 
-void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 6.738059, conv2d_4_w, -0.691922, 0.3410589, conv2d_4_b, -0.5095374, 0.18683507, 1, 1, 1, 1, -1, 0, 1, 0.0, 5.2085133, 9); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.2085133, conv2d_5_w, -0.40904462, 0.39255425, conv2d_5_b, -0.2069035, 0.117769495, 1, 1, 1, 1, -1, 0, -1, -5.6378636, 6.844163, 9); 
-void* var_7 = tensorAdd(var_4, var_6); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 8.4156885, conv2d_6_w, -0.38497055, 0.3736088, conv2d_6_b, -0.14458452, 0.18792383, 1, 1, 1, 1, -1, 0, 1, 0.0, 6.5020022, 9); 
-void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 6.5020022, conv2d_7_w, -0.30858195, 0.4282964, conv2d_7_b, -0.1807645, 0.07482771, 1, 1, 1, 1, -1, 0, -1, -3.3083274, 5.364109, 9); 
-void* var_11 = tensorAdd(var_8, var_10); 
-void* var_12 = tensorRelu(var_11); 
-void* var_13 = ConvLayer_PROMISE(var_12, 0.0, 9.382513, conv2d_8_w, -0.5838584, 0.44527876, conv2d_8_b, -0.2637087, 0.22768898, 1, 1, 2, 2, -1, 0, 1, 0.0, 12.158108, 9); 
-void* var_14 = ConvLayer_PROMISE(var_13, 0.0, 12.158108, conv2d_9_w, -0.46162197, 0.42936426, conv2d_9_b, -0.1289545, 0.51804763, 1, 1, 1, 1, -1, 0, -1, -17.15394, 12.169734, 9); 
-void* var_15 = ConvLayer_PROMISE(var_12, 0.0, 9.382513, conv2d_10_w, -0.69971406, 0.99415976, conv2d_10_b, -0.1289545, 0.51804763, 0, 0, 2, 2, -1, 0, -1, -5.418469, 11.448848, 9); 
-void* var_16 = tensorAdd(var_15, var_14); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 18.46502, conv2d_11_w, -0.43470153, 0.36867705, conv2d_11_b, -0.51738244, 0.15350178, 1, 1, 1, 1, -1, 0, 1, 0.0, 13.211603, 9); 
-void* var_19 = ConvLayer_PROMISE(var_18, 0.0, 13.211603, conv2d_12_w, -0.29948497, 0.38820583, conv2d_12_b, -0.37389848, 0.26664862, 1, 1, 1, 1, -1, 0, -1, -10.085186, 13.555471, 9); 
-void* var_20 = tensorAdd(var_17, var_19); 
-void* var_21 = tensorRelu(var_20); 
-void* var_22 = ConvLayer_PROMISE(var_21, 0.0, 22.695429, conv2d_13_w, -0.44317818, 0.30531815, conv2d_13_b, -0.36851564, 0.06573071, 1, 1, 1, 1, -1, 0, 1, 0.0, 19.886229, 9); 
-void* var_23 = ConvLayer_PROMISE(var_22, 0.0, 19.886229, conv2d_14_w, -0.3271309, 0.33153397, conv2d_14_b, -0.38927156, 0.066472165, 1, 1, 1, 1, -1, 0, -1, -8.295334, 15.001421, 9); 
-void* var_24 = tensorAdd(var_21, var_23); 
-void* var_25 = tensorRelu(var_24); 
-void* var_26 = ConvLayer_PROMISE(var_25, 0.0, 28.637527, conv2d_15_w, -0.44983515, 0.43999374, conv2d_15_b, -0.21998975, 0.36213604, 1, 1, 2, 2, -1, 0, 1, 0.0, 44.106163, 9); 
-void* var_27 = ConvLayer_PROMISE(var_26, 0.0, 44.106163, conv2d_16_w, -0.4508994, 0.41697323, conv2d_16_b, -0.27649263, 0.42242092, 1, 1, 1, 1, -1, 0, -1, -47.52727, 75.15572, 9); 
-void* var_28 = ConvLayer_PROMISE(var_25, 0.0, 28.637527, conv2d_17_w, -0.57827795, 0.7829617, conv2d_17_b, -0.27649248, 0.42242065, 0, 0, 2, 2, -1, 0, -1, -8.998529, 10.628808, 9); 
-void* var_29 = tensorAdd(var_28, var_27); 
-void* var_30 = tensorRelu(var_29); 
-void* var_31 = ConvLayer_PROMISE(var_30, 0.0, 77.00688, conv2d_18_w, -0.37020415, 0.4076619, conv2d_18_b, -0.572569, 0.288411, 1, 1, 1, 1, -1, 0, 1, 0.0, 58.209835, 9); 
-void* var_32 = ConvLayer_PROMISE(var_31, 0.0, 58.209835, conv2d_19_w, -0.40217596, 0.4803875, conv2d_19_b, -0.84837836, 0.41470897, 1, 1, 1, 1, -1, 0, -1, -61.702118, 45.982677, 9); 
-void* var_33 = tensorAdd(var_30, var_32); 
-void* var_34 = tensorRelu(var_33); 
-void* var_35 = ConvLayer_PROMISE(var_34, 0.0, 98.688995, conv2d_20_w, -0.41761914, 0.4584275, conv2d_20_b, -0.335136, 0.42988807, 1, 1, 1, 1, -1, 0, 1, 0.0, 95.06278, 9); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 95.06278, conv2d_21_w, -0.32336038, 0.35931262, conv2d_21_b, -0.645176, 0.45402992, 1, 1, 1, 1, -1, 0, -1, -116.62798, 127.2517, 9); 
-void* var_37 = tensorAdd(var_34, var_36); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,1,8,8,0,0,8,8); 
-void* var_40 = FCLayer_PROMISE(var_39, 0.0, 22.762705, dense_1_w, -0.876813, 0.6065728, dense_1_b, -0.36824417, 0.25160706, -1, -30.914663, 49.802082, 9); 
-void* var_41 = tensorSoftmax(var_40); 
-
-computeAccuracy2(labels,10000,var_41); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
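Review note: the ConvLayer_PROMISE calls are hard to audit because all 19 knobs are positional. Comparing the call sites across these files, the argument groups appear to be: a tensor plus its (min, max) quantization range for the input, weights, and bias; convolution padding and strides; a pooling selector and size (-1 and 0 when absent); an activation selector (1 at clamped/ReLU layers, -1 at pass-through layers); the expected output range; and a final approximation level (9 throughout). A thin wrapper makes that reading explicit; the parameter names are descriptive guesses inferred from usage, not the runtime's own documentation, and the declaration of ConvLayer_PROMISE itself comes from tensor_runtime.h:

    // Descriptive wrapper over the positional ConvLayer_PROMISE interface;
    // the meaning of each group is inferred from the call sites above.
    void* convLayerPromise(void* input,  float in_min, float in_max,
                           void* filter, float w_min,  float w_max,
                           void* bias,   float b_min,  float b_max,
                           int pad_h, int pad_w, int stride_h, int stride_w,
                           int pool_type,  // -1 at call sites with no pooling, 0 where pooling follows
                           int pool_size,  // 2 at the pooled layers, 0 otherwise
                           int activation, // 1 at clamped/ReLU layers, -1 otherwise
                           float out_min, float out_max,
                           int approx_level) { // 9 at every call site in these files
      return ConvLayer_PROMISE(input, in_min, in_max, filter, w_min, w_max,
                               bias, b_min, b_max, pad_h, pad_w, stride_h, stride_w,
                               pool_type, pool_size, activation,
                               out_min, out_max, approx_level);
    }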
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_valid.cc
deleted file mode 100644
index 63aef3744fabc598ccc6653534074283edecef03..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/resnet18_valid.cc
+++ /dev/null
@@ -1,189 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  int total_runs = 20;  // repeat the sweep; per-run accuracies are dumped at the end
-  for (int i = 0 ; i < total_runs; i++){ 
-
-    startMemTracking(); 
-
-    int test_input_size = 5000;  // validation split: the first 5000 images
-    int batch_size = 2500;       // images per batch
-    int offset = 0;              // the test-split drivers start at 5000 instead
-    int batch_count = test_input_size / batch_size; 
-    float final_accuracy = 0.0; 
-
-    for(int i = 0; i < batch_count; i++){ 
-
-      std::string dir_prefix = std::string("../model_params/resnet18_cifar10_promise/");	   
-      std::string input_path =  dir_prefix + std::string("input.bin"); 
-      std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-      std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-      void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-      std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-      void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-      void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-      void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-      void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-      void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-      void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-      void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-      void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-      void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-      void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-      void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-      void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-      std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-      void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-      std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-      void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-      std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-      void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-      void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-      std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-      void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-      void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-      void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-      void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-      void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-      void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-      void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-      void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-      void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-      void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-      std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-      void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-      std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-      void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-      std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-      void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-      void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-      std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-      void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-      void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-      void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-      void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-      void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-      void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-      void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-      void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-      void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-      std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-      void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-      std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-      void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-      std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-      void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-      std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-      void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-      int start = i * batch_size + offset; 
-      int end = (i + 1) * batch_size + offset; 
-
-      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-      void* var_0 = ConvLayer_PROMISE(input, -0.5500815, 0.60786617, conv2d_1_w, -1.0248864, 1.2929907, conv2d_1_b, -0.36291853, 0.2533059, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.8791630274057383, 9); 
-      void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 0.8791630274057383, conv2d_2_w, -0.69884616, 0.71849966, conv2d_2_b, -0.2781147, 0.45571187, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1859495645761484, 9); 
-      void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 1.1859495645761484, conv2d_3_w, -0.59568167, 0.7714691, conv2d_3_b, -0.8602873, 0.19743633, 1, 1, 1, 1, -1, 0, -1, -2.2316832554340365, 2.266301159858699, 9); 
-      void* var_3 = tensorAdd(var_0, var_2); 
-      void* var_4 = tensorRelu(var_3); 
-      void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.789569139480591, conv2d_4_w, -0.41976976, 0.43748936, conv2d_4_b, -0.7021962, 0.3033103, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.3341254055499974, 9); 
-      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 1.3341254055499974, conv2d_5_w, -0.46757826, 0.4635873, conv2d_5_b, -0.20662616, 0.1778044, 1, 1, 1, 1, -1, 0, -1, -0.9912706619501114, 1.0245310074090952, 9); 
-      void* var_7 = tensorAdd(var_4, var_6); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 2.998989346027372, conv2d_6_w, -0.64404047, 0.45383143, conv2d_6_b, -0.819547, 0.38550296, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.2850778144597967, 9); 
-      void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 1.2850778144597967, conv2d_7_w, -0.41986948, 0.33654243, conv2d_7_b, -0.3563013, 0.22371122, 1, 1, 1, 1, -1, 0, -1, -1.2940701305866242, 0.7332147359848022, 9); 
-      void* var_11 = tensorAdd(var_8, var_10); 
-      void* var_12 = tensorRelu(var_11); 
-      void* var_13 = ConvLayer_PROMISE(var_12, 0.0, 2.8626382386684384, conv2d_8_w, -0.4805263, 0.50655717, conv2d_8_b, -0.296758, 0.7742441, 1, 1, 2, 2, -1, 0, 1, 0.0, 3.6232483506202584, 9); 
-      void* var_14 = ConvLayer_PROMISE(var_13, 0.0, 3.6232483506202584, conv2d_9_w, -0.52083415, 0.45517674, conv2d_9_b, -0.20242067, 0.8236838, 1, 1, 1, 1, -1, 0, -1, -6.319877154827118, 6.882811555862418, 9); 
-      void* var_15 = ConvLayer_PROMISE(var_12, 0.0, 2.8626382386684384, conv2d_10_w, -0.5338656, 1.3395424, conv2d_10_b, -0.20242067, 0.8236838, 0, 0, 2, 2, -1, 0, -1, -0.9930689406394959, 2.8721754658222096, 9); 
-      void* var_16 = tensorAdd(var_15, var_14); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 8.315858840942383, conv2d_11_w, -0.34429058, 0.43629733, conv2d_11_b, -1.0744808, 0.056708273, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.6893706333637226, 9); 
-      void* var_19 = ConvLayer_PROMISE(var_18, 0.0, 2.6893706333637226, conv2d_12_w, -0.30342352, 0.39493486, conv2d_12_b, -0.44630566, 0.6492069, 1, 1, 1, 1, -1, 0, -1, -1.8801953810453416, 1.714934362173068, 9); 
-      void* var_20 = tensorAdd(var_17, var_19); 
-      void* var_21 = tensorRelu(var_20); 
-      void* var_22 = ConvLayer_PROMISE(var_21, 0.0, 8.381670951843262, conv2d_13_w, -0.38351893, 0.45775774, conv2d_13_b, -1.4733055, -0.014426912, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.569231034517287, 9); 
-      void* var_23 = ConvLayer_PROMISE(var_22, 0.0, 2.569231034517287, conv2d_14_w, -0.25695276, 0.45372736, conv2d_14_b, -0.5259744, 0.26591402, 1, 1, 1, 1, -1, 0, -1, -1.9701244848966597, 1.4661400413513093, 9); 
-      void* var_24 = tensorAdd(var_21, var_23); 
-      void* var_25 = tensorRelu(var_24); 
-      void* var_26 = ConvLayer_PROMISE(var_25, 0.0, 8.188224797248836, conv2d_15_w, -0.55299705, 0.5443531, conv2d_15_b, -0.71790683, 1.2730768, 1, 1, 2, 2, -1, 0, 1, 0.0, 12.411911067962677, 9); 
-      void* var_27 = ConvLayer_PROMISE(var_26, 0.0, 12.411911067962677, conv2d_16_w, -0.4203967, 0.48641303, conv2d_16_b, -0.90653443, 1.3546854, 1, 1, 1, 1, -1, 0, -1, -25.407194147109987, 20.519153985977383, 9); 
-      void* var_28 = ConvLayer_PROMISE(var_25, 0.0, 8.188224797248836, conv2d_17_w, -0.4365755, 0.84913826, conv2d_17_b, -0.90653443, 1.3546851, 0, 0, 2, 2, -1, 0, -1, -4.256520752906799, 5.730506427288059, 9); 
-      void* var_29 = tensorAdd(var_28, var_27); 
-      void* var_30 = tensorRelu(var_29); 
-      void* var_31 = ConvLayer_PROMISE(var_30, 0.0, 22.350475664138983, conv2d_18_w, -0.38657624, 0.5228989, conv2d_18_b, -1.2083547, 0.76361173, 1, 1, 1, 1, -1, 0, 1, 0.0, 23.93387042045599, 9); 
-      void* var_32 = ConvLayer_PROMISE(var_31, 0.0, 23.93387042045599, conv2d_19_w, -0.40857902, 0.575035, conv2d_19_b, -1.8731614, 1.0960501, 1, 1, 1, 1, -1, 0, -1, -35.37134181976318, 19.209569931030273, 9); 
-      void* var_33 = tensorAdd(var_30, var_32); 
-      void* var_34 = tensorRelu(var_33); 
-      void* var_35 = ConvLayer_PROMISE(var_34, 0.0, 29.434949998855657, conv2d_20_w, -0.33079496, 0.5893278, conv2d_20_b, -1.0234511, 1.0016295, 1, 1, 1, 1, -1, 0, 1, 0.0, 27.216757345199866, 9); 
-      void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 27.216757345199866, conv2d_21_w, -0.27897888, 0.38280907, conv2d_21_b, -2.2086356, 1.0066502, 1, 1, 1, 1, -1, 0, -1, -42.31447326660156, 29.365212144852038, 9); 
-      void* var_37 = tensorAdd(var_34, var_36); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorPooling(var_38,1,8,8,0,0,8,8); 
-      void* var_40 = FCLayer_PROMISE(var_39, 0.0, 13.736315393447876, dense_1_w, -1.5092047, 1.0279838, dense_1_b, -0.49379802, 0.61032647, -1, -45.52749088287353, 31.64324799537669, 9); 
-      void* var_41 = tensorSoftmax(var_40); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-      float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-      final_accuracy += accuracy; 
-      freeBatchMemory(); 
- 
-    }
-
-    final_accuracy = final_accuracy / batch_count; 
-    dumpFinalAccuracy(final_accuracy); 
-
-
-  }
-
-  dumpExecutionAccuracies(); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
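Review note: these drivers re-read every weight tensor from disk inside the per-batch loop, so a two-batch run deserializes the full model twice. That keeps each iteration self-contained under the startMemTracking()/freeBatchMemory() scheme, which presumably reclaims allocations made since tracking began; if weights loaded before tracking starts survive the per-batch frees, the loads can be hoisted. A minimal sketch of the hoisted shape, showing only the first layer (ResNetWeights and loadWeights are illustrative names, and the assumption about freeBatchMemory() sparing pre-tracking allocations is unverified):

    #include <string>

    // Illustrative bundle; the real drivers load ~44 such tensors.
    struct ResNetWeights {
      void* conv2d_1_w;
      void* conv2d_1_b;
      // ... remaining conv2d_*/dense_* tensors elided
    };

    ResNetWeights loadWeights(const std::string& dir_prefix) {
      ResNetWeights w;
      w.conv2d_1_w = readTrainedWeights((dir_prefix + "conv2d_1_w.bin").c_str(), 0, 16, 3, 3, 3);
      w.conv2d_1_b = readTrainedWeights((dir_prefix + "conv2d_1_b.bin").c_str(), 0, 1, 16, 1, 1);
      return w;
    }

    // In main: load once, then start tracking so only batch tensors are reclaimed.
    //   ResNetWeights w = loadWeights("../model_params/resnet18_cifar10_promise/");
    //   startMemTracking();
    //   for (...) { /* readInputBatch + forward pass using w; freeBatchMemory(); */ }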
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise.cc
deleted file mode 100644
index ed11c5158d63c9e07188200e28cbfe32f08a87b2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise.cc
+++ /dev/null
@@ -1,175 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-
-int total_runs = 1;   // number of full evaluation runs
-float bench_acc = 0;  // accuracy threshold a run must reach
-int to_skip = 5;      // abort after this many runs fall below bench_acc
-
-
-int main(int argc, char* argv[]){ 
-
-  int test_input_size = 3000;  // images evaluated per run
-  int batch_size = 1000;       // images per batch
-  int offset = 5000;           // index of the first test image
-
-  
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-
-  if (argc > 2){
-    bench_acc = atof(argv[2]);
-  }
-
-  if(argc > 3){
-    to_skip = atoi(argv[3]);   
-  }
-
-  if(argc > 4){
-    test_input_size = atoi(argv[4]);   
-  }
-
-  if(argc > 5){
-    offset = atoi(argv[5]);   
-  }
-
-
-  llvm_hpvm_initTensorRt(1); 
-
-  
-  int missed = 0; 
-  for (int i = 0 ; i < total_runs; i++){ 
-
-   if (missed >= to_skip){
-     break;           
-   }
-
-   startMemTracking(); 
-
-
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-   
-   for(int i = 0; i < batch_count; i++){
-     
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset;
-     
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 8.190712, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 8.190712, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 19.023172, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 19.023172, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 14.428051, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 14.428051, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 23.065294, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 23.065294, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 15.165984, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 15.165984, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 13.664733, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 13.664733, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 19.025272, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 19.025272, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 6.727217, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 6.727217, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.2003012, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 3.2003012, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.321189, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 4.321189, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.936297, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 2.936297, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 4.87262, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.87262, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 10.32133, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 10.32133, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 13.91, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 13.91, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -70.45701, 87.34367, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     float accuracy = computeAccuracy2(labels, batch_size, var_15, 100); 
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy);
-
-
-   if (final_accuracy < bench_acc)
-     missed += 1;
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
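Review note: vgg16_cifar100_promise.cc takes the same five positional arguments as resnet18_promise.cc (runs, accuracy threshold, miss budget, input size, offset) through bare atoi/atof calls, and couples them to an early-exit rule: a run that falls below bench_acc counts as a miss, and to_skip misses abort the remaining runs. A compact sketch of that plumbing (BenchConfig and parseArgs are illustrative names):

    #include <cstdlib>

    struct BenchConfig {
      int   total_runs      = 1;     // argv[1]: evaluation runs to execute
      float bench_acc       = 0.0f;  // argv[2]: accuracy a run must reach
      int   to_skip         = 5;     // argv[3]: misses tolerated before aborting
      int   test_input_size = 3000;  // argv[4]: images evaluated per run
      int   offset          = 5000;  // argv[5]: index of the first image
    };

    BenchConfig parseArgs(int argc, char* argv[]) {
      BenchConfig c;
      if (argc > 1) c.total_runs      = atoi(argv[1]);
      if (argc > 2) c.bench_acc       = atof(argv[2]);
      if (argc > 3) c.to_skip         = atoi(argv[3]);
      if (argc > 4) c.test_input_size = atoi(argv[4]);
      if (argc > 5) c.offset          = atoi(argv[5]);
      return c;
    }

    // Miss-budget early exit, as in the run loop above:
    //   if (final_accuracy < cfg.bench_acc) missed += 1;   // after each run
    //   if (missed >= cfg.to_skip) break;                  // before the next run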
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise_quant.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise_quant.cc
deleted file mode 100644
index bbc247fc46fa553a2d8fb479c77023c9960375d6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_promise_quant.cc
+++ /dev/null
@@ -1,133 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
- int total_runs = 1; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   startMemTracking(); 
-
-   int test_input_size = 10000; 
-   int batch_size = 2500; 
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-     int start = i * batch_size; 
-     int end = (i + 1) * batch_size; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.7384350299835205, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.7384350299835205, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.417154796123498, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.417154796123498, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.1919608163833573, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 3.1919608163833573, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.108994026184064, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.108994026184064, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.8264513099193493, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.8264513099193493, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.507186658382409, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 2.507186658382409, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.550416946411133, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 2.550416946411133, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7303829237818675, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.7303829237818675, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.32286912292241965, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.32286912292241965, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.47936276525258825, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.47936276525258825, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6409912902116734, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.6409912902116734, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1027569955587349, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1027569955587349, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.4708798038959503, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.4708798038959503, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 2.8148007798194876, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.8148007798194876, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -21.189617557525633, 22.645009384155276, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     float accuracy = computeAccuracy2(labels, batch_size, var_15, 100); 
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
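
Every driver removed in this diff shares the same evaluation skeleton: start memory tracking, walk the input in fixed-size batches, run the layer chain, average the per-batch accuracies, and release batch tensors before the next slice. Below is a condensed sketch of that skeleton, with the layer chain factored behind a hypothetical runNetwork and the inner index renamed (the originals reuse i for both the run loop and the batch loop).

    #include <cstdint>
    #include <string>

    // readInputBatch, readLabelsBatch, computeAccuracy2, startMemTracking and
    // freeBatchMemory come from tensor_runtime.h / utils.h, included above.
    void* runNetwork(void* input);  // hypothetical stand-in for the PROMISE layer chain

    float evaluateInBatches(const std::string& input_path, const std::string& labels_path,
                            int test_input_size, int batch_size, int offset) {
      startMemTracking();
      int batch_count = test_input_size / batch_size;  // any remainder images are dropped
      float final_accuracy = 0.0f;
      for (int b = 0; b < batch_count; b++) {
        int start = b * batch_size + offset;           // e.g. batch 1 of 2500 at offset 0 covers [2500, 5000)
        int end = (b + 1) * batch_size + offset;
        void* input = readInputBatch(input_path.c_str(), 0, start, end, 3, 32, 32);
        void* probs = runNetwork(input);               // 13x ConvLayer_PROMISE, 2x FCLayer_PROMISE, softmax
        uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end);
        final_accuracy += computeAccuracy2(labels, batch_size, probs, 100);
        freeBatchMemory();                             // frees the tensors allocated for this batch
      }
      return final_accuracy / batch_count;
    }
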
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_promise.cc
deleted file mode 100644
index 7911c645679f31171e1c1f87facc1c1f82640adc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_promise.cc
+++ /dev/null
@@ -1,137 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
- llvm_hpvm_initTensorRt(3); 
-
- int total_runs = 1; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   startMemTracking(); 
-
-   int test_input_size = 4000; 
-   //int batch_size = 2500;
-   int batch_size = 4000;
-   int offset = 5000; 
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.7384350299835205, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.7384350299835205, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.417154796123498, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.417154796123498, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.1919608163833573, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 3.1919608163833573, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.108994026184064, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.108994026184064, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.8264513099193493, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.8264513099193493, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.507186658382409, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 2.507186658382409, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.550416946411133, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 2.550416946411133, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7303829237818675, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.7303829237818675, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.32286912292241965, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.32286912292241965, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.47936276525258825, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.47936276525258825, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6409912902116734, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.6409912902116734, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1027569955587349, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1027569955587349, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.4708798038959503, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.4708798038959503, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 2.8148007798194876, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.8148007798194876, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -21.189617557525633, 22.645009384155276, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     // float accuracy = computeAccuracy2(labels, batch_size, var_15, 100);
-     float accuracy = computeTop5Accuracy(labels, batch_size, var_15, 100);
-
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
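
The _top5 driver above matches the quantized driver except for the device index, the 4000-image slice starting at offset 5000, and the metric: computeTop5Accuracy instead of computeAccuracy2. The runtime's implementation of that metric is not part of this diff; the following is a plain-C++ sketch of what a top-5 check over a row-major score matrix can look like. The layout and the percentage return unit are assumptions, chosen to match how the drivers accumulate accuracy.

    #include <cstdint>

    // Fraction (in percent) of samples whose true label ranks among the five
    // highest scores. scores is batch_size x num_classes, row-major, which is
    // an assumed layout, not taken from the runtime.
    float top5Accuracy(const uint8_t* labels, int batch_size,
                       const float* scores, int num_classes) {
      int hits = 0;
      for (int s = 0; s < batch_size; s++) {
        const float* row = scores + s * num_classes;
        float own = row[labels[s]];
        int higher = 0;
        for (int c = 0; c < num_classes; c++)
          if (row[c] > own) higher++;   // strictly better scores only; ties favor the label
        if (higher < 5) hits++;
      }
      return 100.0f * hits / batch_size;
    }
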
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_valid.cc
deleted file mode 100644
index 21eb3ba3e09a7a8ef5ae8940d4c60501ac01abe1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_top5_valid.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
- llvm_hpvm_initTensorRt(2); 
-
- int total_runs = 20; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   startMemTracking(); 
-
-   int test_input_size = 5000; 
-   int batch_size = 5000;
-   int offset = 0;
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.7384350299835205, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.7384350299835205, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.417154796123498, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.417154796123498, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.1919608163833573, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 3.1919608163833573, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.108994026184064, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.108994026184064, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.8264513099193493, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.8264513099193493, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.507186658382409, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 2.507186658382409, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.550416946411133, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 2.550416946411133, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7303829237818675, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.7303829237818675, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.32286912292241965, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.32286912292241965, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.47936276525258825, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.47936276525258825, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6409912902116734, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.6409912902116734, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1027569955587349, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1027569955587349, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.4708798038959503, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.4708798038959503, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 2.8148007798194876, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.8148007798194876, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -21.189617557525633, 22.645009384155276, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     // float accuracy = computeAccuracy2(labels, batch_size, var_15, 100);

-     float accuracy = computeTop5Accuracy(labels, batch_size, var_15, 100);
-
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_valid.cc
deleted file mode 100644
index b78c506e618535be50e92d9e77a59ecdd793d720..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar100_valid.cc
+++ /dev/null
@@ -1,134 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
- llvm_hpvm_initTensorRt(0); 
-
- int total_runs = 20; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   startMemTracking(); 
-
-   int test_input_size = 5000; 
-   int batch_size = 2500;
-   int offset = 0;
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.7384350299835205, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.7384350299835205, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.417154796123498, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.417154796123498, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.1919608163833573, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 3.1919608163833573, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.108994026184064, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.108994026184064, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.8264513099193493, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.8264513099193493, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.507186658382409, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 2.507186658382409, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.550416946411133, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 2.550416946411133, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7303829237818675, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.7303829237818675, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.32286912292241965, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.32286912292241965, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.47936276525258825, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.47936276525258825, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6409912902116734, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.6409912902116734, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1027569955587349, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1027569955587349, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.4708798038959503, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.4708798038959503, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 2.8148007798194876, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.8148007798194876, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -21.189617557525633, 22.645009384155276, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     float accuracy = computeAccuracy2(labels, batch_size, var_15, 100); 
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
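
This hunk removes the last of four vgg16_cifar100 drivers that share their weight loading and layer chain verbatim and differ only in device index, run count, dataset slice, and metric. A configuration struct capturing exactly those differences shows how small the real variation is; the names here are invented for illustration, not taken from the codebase.

    #include <cstdint>
    #include <functional>

    // The only knobs the four deleted vgg16_cifar100 drivers disagree on.
    struct EvalConfig {
      int device = 0;               // argument to llvm_hpvm_initTensorRt (0, 2 or 3 above)
      int total_runs = 1;           // 20 in the *_valid drivers, 1 elsewhere
      int test_input_size = 10000;  // 10000 / 5000 / 4000 above
      int batch_size = 2500;
      int offset = 0;               // 5000 in the top5 driver: skip the 5000-image tuning slice
      std::function<float(uint8_t*, int, void*, int)> metric;  // computeAccuracy2 or computeTop5Accuracy
    };

With one evaluation routine parameterized this way, each of the four deleted files reduces to a single EvalConfig literal.
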
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_promise.cc
deleted file mode 100644
index 4398e721f9f7bebd28e54d1d5e682b712a159f8e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_promise.cc
+++ /dev/null
@@ -1,174 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-
-int total_runs = 1;
-float bench_acc = 0;
-int to_skip = 5;
-
-
-int main(int argc, char* argv[]){ 
-
- int test_input_size = 3000; 
- int batch_size = 1000;
- int offset = 5000;
-
- 
- if (argc > 1){
-   total_runs = atoi(argv[1]);
- }
-
- if (argc > 2){
-   bench_acc = atof(argv[2]);
- }
-
- if(argc > 3){
-   to_skip = atoi(argv[3]);   
- }
-
- if(argc > 4){
-   test_input_size = atoi(argv[4]);   
- }
-
- if(argc > 5){
-   offset = atoi(argv[5]);   
- }
-
-
- llvm_hpvm_initTensorRt(1); 
-
- int missed = 0; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   if (missed >= to_skip){
-     break;           
-   }
-   
-   startMemTracking(); 
-
-   
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/");
-       
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.8816367, 2.0934217, conv2d_1_w, -0.53275156, 0.49437004, conv2d_1_b, -0.6403629, 0.2490165, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.3590874671936035, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.3590874671936035, conv2d_2_w, -0.2688396, 0.20639156, conv2d_2_b, -0.7745511, 0.82006615, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.521231179237361, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 2.521231179237361, conv2d_3_w, -0.16776876, 0.14878987, conv2d_3_b, -0.35283303, 0.5154362, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.2011985784769053, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 1.2011985784769053, conv2d_4_w, -0.088948585, 0.114222586, conv2d_4_b, -0.30250227, 0.36856708, 1, 1, 1, 1, 0, 2, 1, 0.0, 1.0359880930185312, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 1.0359880930185312, conv2d_5_w, -0.07739562, 0.10973293, conv2d_5_b, -0.15568458, 0.17634983, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.3004955950379369, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 0.3004955950379369, conv2d_6_w, -0.051649556, 0.05435231, conv2d_6_b, -0.07395447, 0.07996062, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.11490475405007583, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 0.11490475405007583, conv2d_7_w, -0.043513633, 0.07577866, conv2d_7_b, -0.06921874, 0.02660573, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.16232508487999475, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 0.16232508487999475, conv2d_8_w, -0.033842053, 0.045218028, conv2d_8_b, -0.022827804, 0.023845317, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.12424996573477909, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.12424996573477909, conv2d_9_w, -0.02211613, 0.032084666, conv2d_9_b, -0.02699063, 0.03773564, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.1746344865113496, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.1746344865113496, conv2d_10_w, -0.01979376, 0.034854397, conv2d_10_b, -0.036107242, 0.07056531, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.5751757621765137, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.5751757621765137, conv2d_11_w, -0.03452098, 0.046055835, conv2d_11_b, -0.051925894, 0.07039055, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7718751144409115, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.7718751144409115, conv2d_12_w, -0.025946895, 0.040090334, conv2d_12_b, -0.06049362, 0.12658806, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1728516906499844, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1728516906499844, conv2d_13_w, -0.021766115, 0.03315237, conv2d_13_b, -0.20705001, 0.117947325, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.0015769386291495, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.0015769386291495, dense_1_w, -0.042597745, 0.046707444, dense_1_b, -0.21937433, 0.2545502, 1, 0.0, 2.002361118793486, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.002361118793486, dense_2_w, -0.32550547, 0.30829763, dense_2_b, -1.1787822, 1.2378151, -1, -18.251470546722413, 24.17363445281988, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     float accuracy = computeAccuracy2(labels, batch_size, var_15); 
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
-
-
-   if (final_accuracy < bench_acc)
-     missed += 1;
- }
-
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
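
vgg16_cifar10_promise.cc is the only driver in this group with a command line: positional arguments override the run count, the reference accuracy, the early-exit budget, the input size, and the offset, and the sweep stops once to_skip runs have come in under bench_acc. Condensed from the source above:

    #include <cstdlib>

    int main(int argc, char* argv[]) {
      int total_runs = 1, to_skip = 5, test_input_size = 3000, offset = 5000;
      float bench_acc = 0;
      if (argc > 1) total_runs = atoi(argv[1]);        // positional overrides, as in the source
      if (argc > 2) bench_acc = atof(argv[2]);
      if (argc > 3) to_skip = atoi(argv[3]);
      if (argc > 4) test_input_size = atoi(argv[4]);
      if (argc > 5) offset = atoi(argv[5]);

      int missed = 0;
      for (int run = 0; run < total_runs; run++) {
        if (missed >= to_skip) break;                  // abandon the sweep after to_skip misses
        float final_accuracy = /* one batched evaluation pass, as sketched earlier */ 0.0f;
        if (final_accuracy < bench_acc) missed += 1;
      }
      return 0;
    }
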
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_valid.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_valid.cc
deleted file mode 100644
index fbaea86634e2b561f78fd3971a731b1a734dcbaf..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/promise/vgg16_cifar10_valid.cc
+++ /dev/null
@@ -1,137 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
- llvm_hpvm_initTensorRt(0); 
-
- int total_runs = 20; 
- for (int i = 0 ; i < total_runs; i++){ 
-
-   startMemTracking(); 
-
-   int test_input_size = 5000; 
-   int batch_size = 2500;
-   int offset = 0;
-   int batch_count = test_input_size / batch_size; 
-   float final_accuracy = 0.0; 
-
-   for(int i = 0; i < batch_count; i++){ 
-
-     std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/");
-       
-     std::string input_path =  dir_prefix + std::string("input.bin"); 
-     std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-     std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-     void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-     std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-     void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-     void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-     std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-     void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-     std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-     void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-     std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-     void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-     void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-     std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-     void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-     std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-     void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-     std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-     void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-     void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-     void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-     void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-     std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-     void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-     std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-     void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-     std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-     void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-     void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-     void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-     void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-     void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-     void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-     void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-     void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-     void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-     std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-     void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-     std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-     void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-     void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-     std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-     void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-     std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-     void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-     std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-     void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-     int start = i * batch_size + offset; 
-     int end = (i + 1) * batch_size + offset; 
-
-     void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-     void* var_0 = ConvLayer_PROMISE(input, -1.8816367, 2.0934217, conv2d_1_w, -0.53275156, 0.49437004, conv2d_1_b, -0.6403629, 0.2490165, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.3590874671936035, 9); 
-     void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.3590874671936035, conv2d_2_w, -0.2688396, 0.20639156, conv2d_2_b, -0.7745511, 0.82006615, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.521231179237361, 9); 
-     void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 2.521231179237361, conv2d_3_w, -0.16776876, 0.14878987, conv2d_3_b, -0.35283303, 0.5154362, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.2011985784769053, 9); 
-     void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 1.2011985784769053, conv2d_4_w, -0.088948585, 0.114222586, conv2d_4_b, -0.30250227, 0.36856708, 1, 1, 1, 1, 0, 2, 1, 0.0, 1.0359880930185312, 9); 
-     void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 1.0359880930185312, conv2d_5_w, -0.07739562, 0.10973293, conv2d_5_b, -0.15568458, 0.17634983, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.3004955950379369, 9); 
-     void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 0.3004955950379369, conv2d_6_w, -0.051649556, 0.05435231, conv2d_6_b, -0.07395447, 0.07996062, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.11490475405007583, 9); 
-     void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 0.11490475405007583, conv2d_7_w, -0.043513633, 0.07577866, conv2d_7_b, -0.06921874, 0.02660573, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.16232508487999475, 9); 
-     void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 0.16232508487999475, conv2d_8_w, -0.033842053, 0.045218028, conv2d_8_b, -0.022827804, 0.023845317, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.12424996573477909, 9); 
-     void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.12424996573477909, conv2d_9_w, -0.02211613, 0.032084666, conv2d_9_b, -0.02699063, 0.03773564, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.1746344865113496, 9); 
-     void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.1746344865113496, conv2d_10_w, -0.01979376, 0.034854397, conv2d_10_b, -0.036107242, 0.07056531, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.5751757621765137, 9); 
-     void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.5751757621765137, conv2d_11_w, -0.03452098, 0.046055835, conv2d_11_b, -0.051925894, 0.07039055, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7718751144409115, 9); 
-     void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.7718751144409115, conv2d_12_w, -0.025946895, 0.040090334, conv2d_12_b, -0.06049362, 0.12658806, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1728516906499844, 9); 
-     void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1728516906499844, conv2d_13_w, -0.021766115, 0.03315237, conv2d_13_b, -0.20705001, 0.117947325, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.0015769386291495, 9); 
-     void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.0015769386291495, dense_1_w, -0.042597745, 0.046707444, dense_1_b, -0.21937433, 0.2545502, 1, 0.0, 2.002361118793486, 9); 
-     void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.002361118793486, dense_2_w, -0.32550547, 0.30829763, dense_2_b, -1.1787822, 1.2378151, -1, -18.251470546722413, 24.17363445281988, 9); 
-     void* var_15 = tensorSoftmax(var_14); 
-
-     uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-     float accuracy = computeAccuracy2(labels, batch_size, var_15); 
-     final_accuracy += accuracy; 
-     freeBatchMemory(); 
- 
-   }
-
-   final_accuracy = final_accuracy / batch_count; 
-   dumpFinalAccuracy(final_accuracy); 
-
-
- }
-
- dumpExecutionAccuracies(); 
-
- llvm_hpvm_cleanupTensorRt(); 
-
- return 0; 
-
-}
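For reference, the ConvLayer_PROMISE calls in the file above all follow one fixed positional layout. Below is an annotated re-spelling of the first call; the argument grouping is inferred from the call sites alone (no header is shown here), so the parameter names are assumptions, not the runtime's documented API:

    // Hypothetical grouping, inferred from the call sites above:
    void* var_0 = ConvLayer_PROMISE(
        input, -1.8816367, 2.0934217,         // input tensor and its min/max range
        conv2d_1_w, -0.53275156, 0.49437004,  // weights and their min/max range
        conv2d_1_b, -0.6403629, 0.2490165,    // bias and its min/max range
        1, 1, 1, 1,                           // pad_h, pad_w, stride_h, stride_w
        -1, 0,                                // pooling: -1 = none here; (0, 2) in other calls reads as 2x2 pooling
        1,                                    // activation selector (1 throughout; presumably ReLU)
        0.0, 1.3590874671936035,              // output clamp/quantization range
        9);                                   // trailing knob, constant 9 here (PROMISE swing level, presumably)

FCLayer_PROMISE follows the same pattern minus the convolution geometry, and the final dense layer passes -1 for the activation with a wide output range before tensorSoftmax.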
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10.cc
deleted file mode 100644
index 97f9ef79a58b0cbac3c38352b1ca3354ec803cf0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-    void* var_4 = tensorRelu(var_3); 
-    void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorAdd(var_4, var_11); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-    void* var_21 = tensorAdd(var_13, var_20); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-    void* var_30 = tensorAdd(var_22, var_29); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-    void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-    void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-    void* var_42 = tensorAdd(var_41, var_38); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-    void* var_47 = tensorRelu(var_46); 
-    void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-    void* var_51 = tensorAdd(var_43, var_50); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-    void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-    void* var_60 = tensorAdd(var_52, var_59); 
-    void* var_61 = tensorRelu(var_60); 
-    void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-    void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-    void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-    void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-    void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-    void* var_72 = tensorAdd(var_71, var_68); 
-    void* var_73 = tensorRelu(var_72); 
-    void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-    void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-    void* var_77 = tensorRelu(var_76); 
-    void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-    void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-    void* var_81 = tensorAdd(var_73, var_80); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-    void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-    void* var_86 = tensorRelu(var_85); 
-    void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-    void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-    void* var_90 = tensorAdd(var_82, var_89); 
-    void* var_91 = tensorRelu(var_90); 
-    void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-    void* var_95 = tensorAdd(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmax(var_95); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
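The flat listing above expands ResNet-18's repeated structure inline: each basic block is conv -> bias add -> ReLU -> conv -> bias add -> skip add -> ReLU, and each downsampling stage (conv2d_8, conv2d_15) pairs a stride-2 3x3 convolution with a stride-2 1x1 shortcut convolution (conv2d_10, conv2d_17). A compact helper built from the same tensor_runtime calls would express one identity block — a readability sketch, not part of the original sources:

    // Sketch: one identity basic block, using only calls already present above.
    static void* basicBlock(void* in, void* w1, void* b1, void* w2, void* b2) {
      void* c1 = tensorRelu(tensorAdd(tensorConvolution(in, w1, 1, 1, 1, 1, 1, 0), b1));
      void* c2 = tensorAdd(tensorConvolution(c1, w2, 1, 1, 1, 1, 1, 0), b2);
      return tensorRelu(tensorAdd(in, c2));  // shortcut add, then ReLU
    }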
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_approx.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_approx.cc
deleted file mode 100644
index 11cc3a38dd5da9dfcee7dd1181ab7e9a099fef88..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_approx.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* var_2 = tensorConvPerf(input, conv2d_1_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-    void* var_4 = tensorRelu(var_3); 
-    void* var_6 = tensorConvPerf(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_10 = tensorConvPerf(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorAdd(var_4, var_11); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_15 = tensorConvPerf(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_19 = tensorConvPerf(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-    void* var_21 = tensorAdd(var_13, var_20); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvPerf(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_28 = tensorConvPerf(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-    void* var_30 = tensorAdd(var_22, var_29); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvPerf(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0, 0, 0); 
-    void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_37 = tensorConvPerf(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-    void* var_40 = tensorConvPerf(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0, 0, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-    void* var_42 = tensorAdd(var_41, var_38); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_45 = tensorConvPerf(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-    void* var_47 = tensorRelu(var_46); 
-    void* var_49 = tensorConvPerf(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-    void* var_51 = tensorAdd(var_43, var_50); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvPerf(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0, 1, 0); 
-    void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorConvPerf(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0, 0, 1); 
-    void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-    void* var_60 = tensorAdd(var_52, var_59); 
-    void* var_61 = tensorRelu(var_60); 
-    void* var_63 = tensorConvPerf(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0, 0, 0); 
-    void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_67 = tensorConvPerf(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-    void* var_70 = tensorConvPerf(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0, 0, 0); 
-    void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-    void* var_72 = tensorAdd(var_71, var_68); 
-    void* var_73 = tensorRelu(var_72); 
-    void* var_75 = tensorConvPerf(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-    void* var_77 = tensorRelu(var_76); 
-    void* var_79 = tensorConvPerf(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-    void* var_81 = tensorAdd(var_73, var_80); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_84 = tensorConvPerf(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-    void* var_86 = tensorRelu(var_85); 
-    void* var_88 = tensorConvPerf(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0, 0, 0); 
-    void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-    void* var_90 = tensorAdd(var_82, var_89); 
-    void* var_91 = tensorRelu(var_90); 
-    void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-    void* var_95 = tensorAdd(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmax(var_95); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
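Relative to resnet18_cifar10.cc above, the only change in this variant is tensorConvPerf in place of tensorConvolution, carrying two extra trailing arguments. They are (0, 0) everywhere except conv2d_13 (1, 0) and conv2d_14 (0, 1), which is consistent with per-axis perforation flags — an inference from these call sites only, since no header appears in this diff:

    // Assumed layout (names hypothetical):
    // tensorConvolution(in, w, pad_h, pad_w, stride_h, stride_w, conv_mode, conv_groups)
    // tensorConvPerf   (in, w, pad_h, pad_w, stride_h, stride_w, conv_mode, conv_groups,
    //                   perf_rows, perf_cols)  // (0,0) = exact; (1,0)/(0,1) perturb one axis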
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cpu.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cpu.cc
deleted file mode 100644
index df540551d71814bf4bf18d349bf08cb03151e1dc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cpu.cc
+++ /dev/null
@@ -1,253 +0,0 @@
-
-#include <fcntl.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_cpu_runtime.h"
-#include "../include/types.h"
-#include "../include/utils_cpu.h"
-int main() {
-
-  llvm_hpvm_initTensorRt(0);
-
-  std::string dir_prefix = std::string("./model_params/resnet18_cifar10_3/");
-  std::string input_path = dir_prefix + std::string("input.bin");
-  // void* input = readTrainedWeightsCPU(input_path.c_str(), 0, batch_size,3,32,32);
-  std::string labels_path = dir_prefix + std::string("labels.bin");
-  
-  std::string conv2d_1_w_path = dir_prefix + std::string("conv2d_1_w.bin");
-  void *conv2d_1_w =
-      readTrainedWeightsCPU(conv2d_1_w_path.c_str(), 0, 16, 3, 3, 3);
-  std::string conv2d_1_b_path = dir_prefix + std::string("conv2d_1_b.bin");
-  void *conv2d_1_b =
-      readTrainedWeightsCPU(conv2d_1_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_2_w_path = dir_prefix + std::string("conv2d_2_w.bin");
-  void *conv2d_2_w =
-      readTrainedWeightsCPU(conv2d_2_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_2_b_path = dir_prefix + std::string("conv2d_2_b.bin");
-  void *conv2d_2_b =
-      readTrainedWeightsCPU(conv2d_2_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_3_w_path = dir_prefix + std::string("conv2d_3_w.bin");
-  void *conv2d_3_w =
-      readTrainedWeightsCPU(conv2d_3_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_3_b_path = dir_prefix + std::string("conv2d_3_b.bin");
-  void *conv2d_3_b =
-      readTrainedWeightsCPU(conv2d_3_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_4_w_path = dir_prefix + std::string("conv2d_4_w.bin");
-  void *conv2d_4_w =
-      readTrainedWeightsCPU(conv2d_4_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_4_b_path = dir_prefix + std::string("conv2d_4_b.bin");
-  void *conv2d_4_b =
-      readTrainedWeightsCPU(conv2d_4_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_5_w_path = dir_prefix + std::string("conv2d_5_w.bin");
-  void *conv2d_5_w =
-      readTrainedWeightsCPU(conv2d_5_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_5_b_path = dir_prefix + std::string("conv2d_5_b.bin");
-  void *conv2d_5_b =
-      readTrainedWeightsCPU(conv2d_5_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_6_w_path = dir_prefix + std::string("conv2d_6_w.bin");
-  void *conv2d_6_w =
-      readTrainedWeightsCPU(conv2d_6_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_6_b_path = dir_prefix + std::string("conv2d_6_b.bin");
-  void *conv2d_6_b =
-      readTrainedWeightsCPU(conv2d_6_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_7_w_path = dir_prefix + std::string("conv2d_7_w.bin");
-  void *conv2d_7_w =
-      readTrainedWeightsCPU(conv2d_7_w_path.c_str(), 0, 16, 16, 3, 3);
-  std::string conv2d_7_b_path = dir_prefix + std::string("conv2d_7_b.bin");
-  void *conv2d_7_b =
-      readTrainedWeightsCPU(conv2d_7_b_path.c_str(), 0, 1, 16, 1, 1);
-  std::string conv2d_8_w_path = dir_prefix + std::string("conv2d_8_w.bin");
-  void *conv2d_8_w =
-      readTrainedWeightsCPU(conv2d_8_w_path.c_str(), 0, 32, 16, 3, 3);
-  std::string conv2d_8_b_path = dir_prefix + std::string("conv2d_8_b.bin");
-  void *conv2d_8_b =
-      readTrainedWeightsCPU(conv2d_8_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_10_w_path = dir_prefix + std::string("conv2d_10_w.bin");
-  void *conv2d_10_w =
-      readTrainedWeightsCPU(conv2d_10_w_path.c_str(), 0, 32, 16, 1, 1);
-  std::string conv2d_10_b_path = dir_prefix + std::string("conv2d_10_b.bin");
-  void *conv2d_10_b =
-      readTrainedWeightsCPU(conv2d_10_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_9_w_path = dir_prefix + std::string("conv2d_9_w.bin");
-  void *conv2d_9_w =
-      readTrainedWeightsCPU(conv2d_9_w_path.c_str(), 0, 32, 32, 3, 3);
-  std::string conv2d_9_b_path = dir_prefix + std::string("conv2d_9_b.bin");
-  void *conv2d_9_b =
-      readTrainedWeightsCPU(conv2d_9_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_11_w_path = dir_prefix + std::string("conv2d_11_w.bin");
-  void *conv2d_11_w =
-      readTrainedWeightsCPU(conv2d_11_w_path.c_str(), 0, 32, 32, 3, 3);
-  std::string conv2d_11_b_path = dir_prefix + std::string("conv2d_11_b.bin");
-  void *conv2d_11_b =
-      readTrainedWeightsCPU(conv2d_11_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_12_w_path = dir_prefix + std::string("conv2d_12_w.bin");
-  void *conv2d_12_w =
-      readTrainedWeightsCPU(conv2d_12_w_path.c_str(), 0, 32, 32, 3, 3);
-  std::string conv2d_12_b_path = dir_prefix + std::string("conv2d_12_b.bin");
-  void *conv2d_12_b =
-      readTrainedWeightsCPU(conv2d_12_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_13_w_path = dir_prefix + std::string("conv2d_13_w.bin");
-  void *conv2d_13_w =
-      readTrainedWeightsCPU(conv2d_13_w_path.c_str(), 0, 32, 32, 3, 3);
-  std::string conv2d_13_b_path = dir_prefix + std::string("conv2d_13_b.bin");
-  void *conv2d_13_b =
-      readTrainedWeightsCPU(conv2d_13_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_14_w_path = dir_prefix + std::string("conv2d_14_w.bin");
-  void *conv2d_14_w =
-      readTrainedWeightsCPU(conv2d_14_w_path.c_str(), 0, 32, 32, 3, 3);
-  std::string conv2d_14_b_path = dir_prefix + std::string("conv2d_14_b.bin");
-  void *conv2d_14_b =
-      readTrainedWeightsCPU(conv2d_14_b_path.c_str(), 0, 1, 32, 1, 1);
-  std::string conv2d_15_w_path = dir_prefix + std::string("conv2d_15_w.bin");
-  void *conv2d_15_w =
-      readTrainedWeightsCPU(conv2d_15_w_path.c_str(), 0, 64, 32, 3, 3);
-  std::string conv2d_15_b_path = dir_prefix + std::string("conv2d_15_b.bin");
-  void *conv2d_15_b =
-      readTrainedWeightsCPU(conv2d_15_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_17_w_path = dir_prefix + std::string("conv2d_17_w.bin");
-  void *conv2d_17_w =
-      readTrainedWeightsCPU(conv2d_17_w_path.c_str(), 0, 64, 32, 1, 1);
-  std::string conv2d_17_b_path = dir_prefix + std::string("conv2d_17_b.bin");
-  void *conv2d_17_b =
-      readTrainedWeightsCPU(conv2d_17_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_16_w_path = dir_prefix + std::string("conv2d_16_w.bin");
-  void *conv2d_16_w =
-      readTrainedWeightsCPU(conv2d_16_w_path.c_str(), 0, 64, 64, 3, 3);
-  std::string conv2d_16_b_path = dir_prefix + std::string("conv2d_16_b.bin");
-  void *conv2d_16_b =
-      readTrainedWeightsCPU(conv2d_16_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_18_w_path = dir_prefix + std::string("conv2d_18_w.bin");
-  void *conv2d_18_w =
-      readTrainedWeightsCPU(conv2d_18_w_path.c_str(), 0, 64, 64, 3, 3);
-  std::string conv2d_18_b_path = dir_prefix + std::string("conv2d_18_b.bin");
-  void *conv2d_18_b =
-      readTrainedWeightsCPU(conv2d_18_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_19_w_path = dir_prefix + std::string("conv2d_19_w.bin");
-  void *conv2d_19_w =
-      readTrainedWeightsCPU(conv2d_19_w_path.c_str(), 0, 64, 64, 3, 3);
-  std::string conv2d_19_b_path = dir_prefix + std::string("conv2d_19_b.bin");
-  void *conv2d_19_b =
-      readTrainedWeightsCPU(conv2d_19_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_20_w_path = dir_prefix + std::string("conv2d_20_w.bin");
-  void *conv2d_20_w =
-      readTrainedWeightsCPU(conv2d_20_w_path.c_str(), 0, 64, 64, 3, 3);
-  std::string conv2d_20_b_path = dir_prefix + std::string("conv2d_20_b.bin");
-  void *conv2d_20_b =
-      readTrainedWeightsCPU(conv2d_20_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string conv2d_21_w_path = dir_prefix + std::string("conv2d_21_w.bin");
-  void *conv2d_21_w =
-      readTrainedWeightsCPU(conv2d_21_w_path.c_str(), 0, 64, 64, 3, 3);
-  std::string conv2d_21_b_path = dir_prefix + std::string("conv2d_21_b.bin");
-  void *conv2d_21_b =
-      readTrainedWeightsCPU(conv2d_21_b_path.c_str(), 0, 1, 64, 1, 1);
-  std::string dense_1_w_path = dir_prefix + std::string("dense_1_w.bin");
-  void *dense_1_w =
-      readTrainedWeightsCPU(dense_1_w_path.c_str(), 0, 1, 1, 64, 10);
-  std::string dense_1_b_path = dir_prefix + std::string("dense_1_b.bin");
-  void *dense_1_b =
-      readTrainedWeightsCPU(dense_1_b_path.c_str(), 0, 1, 10, 1, 1);
-
-  int test_input_size = 10000;
-  int batch_size = 100;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  for (int i = 0; i < batch_count; i++) {
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-
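-    // NOTE: start/end are unused below -- each iteration re-reads the first
-    // batch_size samples; see the note after this file for a per-batch read.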
-    void *input = readTrainedWeightsCPU(input_path.c_str(), 0, batch_size, 3, 32, 32);
-    uint8_t *labels = readLabels(labels_path.c_str(), batch_size);
-    
-    void* var_2 = tensorConvolutionCPU(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_3 = tensorAddCPU(var_2, conv2d_1_b); 
-    void* var_4 = tensorReluCPU(var_3); 
-    void* var_6 = tensorConvolutionCPU(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_7 = tensorAddCPU(var_6, conv2d_2_b); 
-    void* var_8 = tensorReluCPU(var_7); 
-    void* var_10 = tensorConvolutionCPU(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_11 = tensorAddCPU(var_10, conv2d_3_b); 
-    void* var_12 = tensorAddCPU(var_4, var_11); 
-    void *var_13 = tensorReluCPU(var_12); 
-    void* var_15 = tensorConvolutionCPU(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_16 = tensorAddCPU(var_15, conv2d_4_b); 
-    void* var_17 = tensorReluCPU(var_16); 
-    void* var_19 = tensorConvolutionCPU(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_20 = tensorAddCPU(var_19, conv2d_5_b); 
-    void* var_21 = tensorAddCPU(var_13, var_20); 
-    void* var_22 = tensorReluCPU(var_21); 
-    void* var_24 = tensorConvolutionCPU(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAddCPU(var_24, conv2d_6_b); 
-    void *var_26 = tensorReluCPU(var_25); 
-    void* var_28 = tensorConvolutionCPU(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAddCPU(var_28, conv2d_7_b); 
-    void *var_30 = tensorAddCPU(var_22, var_29); 
-    void *var_31 = tensorReluCPU(var_30); 
-    void* var_33 = tensorConvolutionCPU(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-    void *var_34 = tensorAddCPU(var_33, conv2d_8_b);  
-    void *var_35 = tensorReluCPU(var_34); 
-    void *var_37 = tensorConvolutionCPU(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_38 = tensorAddCPU(var_37, conv2d_9_b); 
-    void* var_40 = tensorConvolutionCPU(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-    void *var_41 = tensorAddCPU(var_40, conv2d_10_b); 
-    void* var_42 = tensorAddCPU(var_41, var_38); 
-    void* var_43 = tensorReluCPU(var_42); 
-    void* var_45 = tensorConvolutionCPU(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_46 = tensorAddCPU(var_45, conv2d_11_b); 
-    void *var_47 = tensorReluCPU(var_46); 
-    void* var_49 = tensorConvolutionCPU(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_50 = tensorAddCPU(var_49, conv2d_12_b); 
-    void* var_51 = tensorAddCPU(var_43, var_50); 
-    void* var_52 = tensorReluCPU(var_51); 
-    void* var_54 = tensorConvolutionCPU(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_55 = tensorAddCPU(var_54, conv2d_13_b); 
-    void* var_56 = tensorReluCPU(var_55); 
-    void* var_58 = tensorConvolutionCPU(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-    void* var_59 = tensorAddCPU(var_58, conv2d_14_b); 
-    void* var_60 = tensorAddCPU(var_52, var_59); 
-    void* var_61 = tensorReluCPU(var_60); 
-    void* var_63 = tensorConvolutionCPU(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-    void* var_64 = tensorAddCPU(var_63, conv2d_15_b); 
-    void* var_65 = tensorReluCPU(var_64); 
-    void* var_67 = tensorConvolutionCPU(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-    void* var_68 = tensorAddCPU(var_67, conv2d_16_b); 
-    void* var_70 = tensorConvolutionCPU(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-    void* var_71 = tensorAddCPU(var_70, conv2d_17_b); 
-    void* var_72 = tensorAddCPU(var_71, var_68); 
-    void* var_73 = tensorReluCPU(var_72); 
-    void* var_75 = tensorConvolutionCPU(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-    void* var_76 = tensorAddCPU(var_75, conv2d_18_b); 
-    void* var_77 = tensorReluCPU(var_76); 
-    void* var_79 = tensorConvolutionCPU(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-    void* var_80 = tensorAddCPU(var_79, conv2d_19_b); 
-    void* var_81 = tensorAddCPU(var_73, var_80); 
-    void* var_82 = tensorReluCPU(var_81); 
-    void* var_84 = tensorConvolutionCPU(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-    void* var_85 = tensorAddCPU(var_84, conv2d_20_b); 
-    void* var_86 = tensorReluCPU(var_85); 
-    void* var_88 = tensorConvolutionCPU(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-    void* var_89 = tensorAddCPU(var_88, conv2d_21_b); 
-    void* var_90 = tensorAddCPU(var_82, var_89); 
-    void* var_91 = tensorReluCPU(var_90); 
-    void* var_92 = tensorPoolingCPU(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorGemmCPU(var_92, dense_1_w);
-    void* var_95 = tensorAddCPU(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmaxCPU(var_95);
-
-    float accuracy = computeAccuracy2(labels,batch_size, var_96); 
-    final_accuracy += accuracy;
-  }
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
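Note the bug flagged inside the loop above: start and end are computed but never used, so all 100 iterations re-read the first batch_size inputs and labels rather than the i-th slice, and the reported accuracy averages the same 100 images 100 times. A per-batch read would look like the following — readInputBatchCPU and readLabelsBatchCPU are hypothetical names patterned on the GPU runtime's readInputBatch/readLabelsBatch, since the CPU header's actual API is not shown here:

    // Hypothetical CPU-side batch readers, mirroring the GPU calls used in the
    // sibling files; the real tensor_cpu_runtime API may differ.
    void* input = readInputBatchCPU(input_path.c_str(), 0, start, end, 3, 32, 32);
    uint8_t* labels = readLabelsBatchCPU(labels_path.c_str(), start, end);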
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cudaperf.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cudaperf.cc
deleted file mode 100644
index 2e33715e8c6972966e7359a1e7b8fc5069e1f16f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_cudaperf.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
-    void* var_2 = tensorConvPerfCuda(input, conv2d_1_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-    void* var_4 = tensorRelu(var_3); 
-    void* var_6 = tensorConvPerfCuda(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_10 = tensorConvPerfCuda(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorAdd(var_4, var_11); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_15 = tensorConvPerfCuda(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_19 = tensorConvPerfCuda(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-    void* var_21 = tensorAdd(var_13, var_20); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvPerfCuda(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0, 3, 1, 2); 
-    void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_28 = tensorConvPerfCuda(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-    void* var_30 = tensorAdd(var_22, var_29); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvPerfCuda(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0, 1, 1, 0); 
-    void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_37 = tensorConvPerfCuda(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-    void* var_40 = tensorConvPerfCuda(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-    void* var_42 = tensorAdd(var_41, var_38); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_45 = tensorConvPerfCuda(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0, 3, 1, 0); 
-    void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-    void* var_47 = tensorRelu(var_46); 
-    void* var_49 = tensorConvPerfCuda(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-    void* var_51 = tensorAdd(var_43, var_50); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvPerfCuda(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorConvPerfCuda(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0, 1, 3, 1); 
-    void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-    void* var_60 = tensorAdd(var_52, var_59); 
-    void* var_61 = tensorRelu(var_60); 
-    void* var_63 = tensorConvPerfCuda(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0, 1, 1, 0); 
-    void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_67 = tensorConvPerfCuda(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-    void* var_70 = tensorConvPerfCuda(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0, 3, 1, 2); 
-    void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-    void* var_72 = tensorAdd(var_71, var_68); 
-    void* var_73 = tensorRelu(var_72); 
-    void* var_75 = tensorConvPerfCuda(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-    void* var_77 = tensorRelu(var_76); 
-    void* var_79 = tensorConvPerfCuda(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0, 1, 3, 0); 
-    void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-    void* var_81 = tensorAdd(var_73, var_80); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_84 = tensorConvPerfCuda(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-    void* var_86 = tensorRelu(var_85); 
-    void* var_88 = tensorConvPerfCuda(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0, 1, 1, 0); 
-    void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-    void* var_90 = tensorAdd(var_82, var_89); 
-    void* var_91 = tensorRelu(var_90); 
-    void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-    void* var_95 = tensorAdd(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmax(var_95); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
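This cudaperf variant uses tensorConvPerfCuda, which appends three arguments to the tensorConvolution set. The baseline everywhere is (1, 1, 0); the perturbed layers use (3, 1, 2), (3, 1, 0), (1, 3, 1), and (1, 3, 0), which reads as a (row rate, column rate, offset) perforation triple where 1 means no skipping along that axis and the last value sets the starting phase. That reading is inferred from these call sites alone:

    // Assumed layout (names hypothetical):
    // tensorConvPerfCuda(in, w, pad_h, pad_w, stride_h, stride_w, conv_mode, conv_groups,
    //                    perf_row_rate, perf_col_rate, perf_offset)
    void* var_24 = tensorConvPerfCuda(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0,
                                      3, 1, 2);  // perforate rows at rate 3, phase 2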
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_half.cc
deleted file mode 100644
index f7c6593d525351085ee99606bc90fc1419980d8e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_half.cc
+++ /dev/null
@@ -1,194 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-  int batch_size = 4000;
- 
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
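-  // The tensorHalf* variants below run the same ResNet-18 graph as the FP32
-  // version but compute in half precision; note the final softmax stays tensorSoftmax.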
-  void* var_2 = tensorHalfConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-  void* var_3 = tensorHalfAdd(var_2, conv2d_1_b); 
-  void* var_4 = tensorHalfRelu(var_3); 
-  void* var_6 = tensorHalfConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-  void* var_7 = tensorHalfAdd(var_6, conv2d_2_b); 
-  void* var_8 = tensorHalfRelu(var_7); 
-  void* var_10 = tensorHalfConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-  void* var_11 = tensorHalfAdd(var_10, conv2d_3_b); 
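-  // Residual connection: add the block input (var_4) back onto the conv output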
-  void* var_12 = tensorHalfAdd(var_4, var_11); 
-  void* var_13 = tensorHalfRelu(var_12); 
-  void* var_15 = tensorHalfConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-  void* var_16 = tensorHalfAdd(var_15, conv2d_4_b); 
-  void* var_17 = tensorHalfRelu(var_16); 
-  void* var_19 = tensorHalfConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-  void* var_20 = tensorHalfAdd(var_19, conv2d_5_b); 
-  void* var_21 = tensorHalfAdd(var_13, var_20); 
-  void* var_22 = tensorHalfRelu(var_21); 
-  void* var_24 = tensorHalfConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-  void* var_25 = tensorHalfAdd(var_24, conv2d_6_b); 
-  void* var_26 = tensorHalfRelu(var_25); 
-  void* var_28 = tensorHalfConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-  void* var_29 = tensorHalfAdd(var_28, conv2d_7_b); 
-  void* var_30 = tensorHalfAdd(var_22, var_29); 
-  void* var_31 = tensorHalfRelu(var_30); 
-  void* var_33 = tensorHalfConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-  void* var_34 = tensorHalfAdd(var_33, conv2d_8_b); 
-  void* var_35 = tensorHalfRelu(var_34); 
-  void* var_37 = tensorHalfConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-  void* var_38 = tensorHalfAdd(var_37, conv2d_9_b); 
-  void* var_40 = tensorHalfConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-  void* var_41 = tensorHalfAdd(var_40, conv2d_10_b); 
-  void* var_42 = tensorHalfAdd(var_41, var_38); 
-  void* var_43 = tensorHalfRelu(var_42); 
-  void* var_45 = tensorHalfConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-  void* var_46 = tensorHalfAdd(var_45, conv2d_11_b); 
-  void* var_47 = tensorHalfRelu(var_46); 
-  void* var_49 = tensorHalfConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-  void* var_50 = tensorHalfAdd(var_49, conv2d_12_b); 
-  void* var_51 = tensorHalfAdd(var_43, var_50); 
-  void* var_52 = tensorHalfRelu(var_51); 
-  void* var_54 = tensorHalfConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-  void* var_55 = tensorHalfAdd(var_54, conv2d_13_b); 
-  void* var_56 = tensorHalfRelu(var_55); 
-  void* var_58 = tensorHalfConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-  void* var_59 = tensorHalfAdd(var_58, conv2d_14_b); 
-  void* var_60 = tensorHalfAdd(var_52, var_59); 
-  void* var_61 = tensorHalfRelu(var_60); 
-  void* var_63 = tensorHalfConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-  void* var_64 = tensorHalfAdd(var_63, conv2d_15_b); 
-  void* var_65 = tensorHalfRelu(var_64); 
-  void* var_67 = tensorHalfConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-  void* var_68 = tensorHalfAdd(var_67, conv2d_16_b); 
-  void* var_70 = tensorHalfConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-  void* var_71 = tensorHalfAdd(var_70, conv2d_17_b); 
-  void* var_72 = tensorHalfAdd(var_71, var_68); 
-  void* var_73 = tensorHalfRelu(var_72); 
-  void* var_75 = tensorHalfConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-  void* var_76 = tensorHalfAdd(var_75, conv2d_18_b); 
-  void* var_77 = tensorHalfRelu(var_76); 
-  void* var_79 = tensorHalfConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-  void* var_80 = tensorHalfAdd(var_79, conv2d_19_b); 
-  void* var_81 = tensorHalfAdd(var_73, var_80); 
-  void* var_82 = tensorHalfRelu(var_81); 
-  void* var_84 = tensorHalfConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-  void* var_85 = tensorHalfAdd(var_84, conv2d_20_b); 
-  void* var_86 = tensorHalfRelu(var_85); 
-  void* var_88 = tensorHalfConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-  void* var_89 = tensorHalfAdd(var_88, conv2d_21_b); 
-  void* var_90 = tensorHalfAdd(var_82, var_89); 
-  void* var_91 = tensorHalfRelu(var_90); 
-  void* var_92 = tensorHalfPooling(var_91,1,8,8,0,0,8,8); 
-  void* var_94 = tensorHalfGemmGPU(var_92, dense_1_w); 
-  void* var_95 = tensorHalfAdd(var_94, dense_1_b); 
-  void* var_96 = tensorSoftmax(var_95); 
-
-  computeAccuracy2(labels, batch_size,var_96); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_inputapprox.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_inputapprox.cc
deleted file mode 100644
index 6634ce92c9aed0fbcc32e68580fb3171145ee297..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_inputapprox.cc
+++ /dev/null
@@ -1,221 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-  
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_3/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  //void* input = readTrainedWeights(input_path.c_str(), 0, batch_size,3,32,32); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  //uint8_t* labels = readLabels(labels_path.c_str(), batch_size); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
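-  // startMemTracking has the runtime track per-batch tensor allocations so that
-  // freeBatchMemory, called at the end of each loop iteration, can release them.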
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 2000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
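-    // tensorConvolutionKernelSamp takes one extra trailing argument vs. plain
-    // tensorConvolution; it is presumably the kernel-sampling knob (30 on most
-    // layers, a more aggressive 4 on a few), trading accuracy for speed per conv.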
-    void* var_2 = tensorConvolutionKernelSamp(input, conv2d_1_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-    void* var_4 = tensorRelu(var_3); 
-    void* var_6 = tensorConvolutionKernelSamp(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_10 = tensorConvolutionKernelSamp(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-    void* var_12 = tensorAdd(var_4, var_11); 
-    void* var_13 = tensorRelu(var_12); 
-    void* var_15 = tensorConvolutionKernelSamp(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_19 = tensorConvolutionKernelSamp(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-    void* var_21 = tensorAdd(var_13, var_20); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolutionKernelSamp(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0, 4); 
-    void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_28 = tensorConvolutionKernelSamp(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0, 4); 
-    void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-    void* var_30 = tensorAdd(var_22, var_29); 
-    void* var_31 = tensorRelu(var_30); 
-    void* var_33 = tensorConvolutionKernelSamp(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0, 30); 
-    void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-    void* var_35 = tensorRelu(var_34); 
-    void* var_37 = tensorConvolutionKernelSamp(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-    void* var_40 = tensorConvolutionKernelSamp(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0, 30); 
-    void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-    void* var_42 = tensorAdd(var_41, var_38); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_45 = tensorConvolutionKernelSamp(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0, 4); 
-    void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-    void* var_47 = tensorRelu(var_46); 
-    void* var_49 = tensorConvolutionKernelSamp(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-    void* var_51 = tensorAdd(var_43, var_50); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvolutionKernelSamp(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorConvolutionKernelSamp(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-    void* var_60 = tensorAdd(var_52, var_59); 
-    void* var_61 = tensorRelu(var_60); 
-    void* var_63 = tensorConvolutionKernelSamp(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0, 30); 
-    void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_67 = tensorConvolutionKernelSamp(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-    void* var_70 = tensorConvolutionKernelSamp(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0, 30); 
-    void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-    void* var_72 = tensorAdd(var_71, var_68); 
-    void* var_73 = tensorRelu(var_72); 
-    void* var_75 = tensorConvolutionKernelSamp(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-    void* var_77 = tensorRelu(var_76); 
-    void* var_79 = tensorConvolutionKernelSamp(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-    void* var_81 = tensorAdd(var_73, var_80); 
-    void* var_82 = tensorRelu(var_81); 
-    void* var_84 = tensorConvolutionKernelSamp(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-    void* var_86 = tensorRelu(var_85); 
-    void* var_88 = tensorConvolutionKernelSamp(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0, 30); 
-    void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-    void* var_90 = tensorAdd(var_82, var_89); 
-    void* var_91 = tensorRelu(var_90); 
-    void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-    void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-    void* var_95 = tensorAdd(var_94, dense_1_b); 
-    void* var_96 = tensorSoftmax(var_95); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  stopProfiling();
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_tuner.cc
deleted file mode 100644
index 689e241c5b4a0a5e1c5b98326998f37d5e803f75..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/resnet18_cifar10_tuner.cc
+++ /dev/null
@@ -1,265 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 1;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-  
-
-  llvm_hpvm_initTensorRt(0); 
-
-
-  /*int skip_tensor_ids[22];
-  skip_tensor_ids[0] = 0;
-  skip_tensor_ids[1] = 1;
-  skip_tensor_ids[2] = 1;
-  skip_tensor_ids[3] = 3;
-  skip_tensor_ids[4] = 4;
-  skip_tensor_ids[5] = 4;
-  skip_tensor_ids[6] = 10;
-  skip_tensor_ids[7] = 11;
-  skip_tensor_ids[8] = 17;
-  skip_tensor_ids[9] = 18;  
-  skip_tensor_ids[10] = 24;
-  skip_tensor_ids[11] = 25;
-  skip_tensor_ids[12] = 25;
-  skip_tensor_ids[13] = 33;
-  skip_tensor_ids[14] = 34;
-  skip_tensor_ids[15] = 35;
-  skip_tensor_ids[16] = 40;
-  skip_tensor_ids[17] = 41;
-  skip_tensor_ids[18] = 47;
-  skip_tensor_ids[19] = 48;  
-  //--- readSkipTensors(skip_tensor_ids, 22);
-  //-- readSkipTensors(skip_tensor_ids, 10);
-  readSkipTensors(skip_tensor_ids, 20);
-  */
-    
-  std::string dir_prefix = std::string("../model_params/resnet18_cifar10_promise/"); 
-  // std::string input_path =  dir_prefix + std::string("resnet18_calib.bin"); 
-  // std::string labels_path =  dir_prefix + std::string("resnet18_train_labels.bin");
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-  std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-  void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-  void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-  std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-  void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-  void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-  std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-  void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-  void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-  void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-  void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-  void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-  void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-  void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-  void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-  void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-  void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-  void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 500;
-  int batch_size = 500;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  // NOTE: Starting time profiling
-  startProfiling();
-  
-
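-  // Each outer iteration re-runs the whole network once; with total_runs > 1 the
-  // tuner collects one accuracy sample per run (see dumpExecutionAccuracies below).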
-  for(int j = 0; j < total_runs; j++){
-    
-    float final_accuracy = 0.0;
-    for(int i = 0; i < batch_count; i++){
-
-      int start = i * batch_size + offset;
-      int end = (i + 1) * batch_size + offset;
-    
-      void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32);
-    
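-      // tensorConvolution args: input, filter, pad_h, pad_w, stride_h, stride_w,
-      // conv mode (1 = cross-correlation), compute precision (0 = float)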
-      void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-      void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-      void* var_4 = tensorRelu(var_3); 
-      void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-      void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-      void* var_8 = tensorRelu(var_7); 
-      void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-      void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-      void* var_12 = tensorAdd(var_4, var_11); 
-      void* var_13 = tensorRelu(var_12); 
-      void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-      void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-      void* var_17 = tensorRelu(var_16); 
-      void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-      void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-      void* var_21 = tensorAdd(var_13, var_20); 
-      void* var_22 = tensorRelu(var_21); 
-      void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-      void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-      void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-      void* var_30 = tensorAdd(var_22, var_29); 
-      void* var_31 = tensorRelu(var_30); 
-      void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-      void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-      void* var_35 = tensorRelu(var_34); 
-      void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-      void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-      void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-      void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-      void* var_42 = tensorAdd(var_41, var_38); 
-      void* var_43 = tensorRelu(var_42); 
-      void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-      void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-      void* var_47 = tensorRelu(var_46); 
-      void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-      void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-      void* var_51 = tensorAdd(var_43, var_50); 
-      void* var_52 = tensorRelu(var_51); 
-      void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-      void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-      void* var_56 = tensorRelu(var_55); 
-      void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-      void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-      void* var_60 = tensorAdd(var_52, var_59); 
-      void* var_61 = tensorRelu(var_60); 
-      void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-      void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-      void* var_65 = tensorRelu(var_64); 
-      void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-      void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-      void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-      void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-      void* var_72 = tensorAdd(var_71, var_68); 
-      void* var_73 = tensorRelu(var_72); 
-      void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-      void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-      void* var_77 = tensorRelu(var_76); 
-      void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-      void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-      void* var_81 = tensorAdd(var_73, var_80); 
-      void* var_82 = tensorRelu(var_81); 
-      void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-      void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-      void* var_86 = tensorRelu(var_85); 
-      void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-      void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-      void* var_90 = tensorAdd(var_82, var_89); 
-      void* var_91 = tensorRelu(var_90); 
-      void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-      void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-      void* var_95 = tensorAdd(var_94, dense_1_b); 
-      void* var_96 = tensorSoftmax(var_95); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels,batch_size,var_96); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-
-    stopProfiling();
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy);
-  }
-  
-  dumpExecutionAccuracies();
-
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/fc2_clipped_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/fc2_clipped_promise.cc
deleted file mode 100644
index d7addd7283e24bedfc32d57d84c4ce17d9966f57..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/fc2_clipped_promise.cc
+++ /dev/null
@@ -1,80 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-#include "../../include/types.h"
-
-
-
-void test_2_Layer_clipped_FC(){
-
-  printf("********* 2-Layer FC with clipped activations and weights ********* \n");
-  // FIXIT: Process the images in smaller batches - currently a single batch of 10000 images
-
-  int test_batch_size = 10000;
-  
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-				   float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/fc2_clipped/fc1.bin",
-					 float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/fc2_clipped/fc1_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/fc2_clipped/fc2.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/fc2_clipped/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-  // Start execution-time profiling of tensor ops
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  dumpWeightsToFile("tensors_out2/fc1out.out", fc1out);  
-
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  printTensorDims(fc1_bias_out);
-  dumpWeightsToFile("tensors_out2/fc1_bias.out", fc1_bias_out);  
-
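-  // tensorRelu2(t, min, max) is assumed to clamp activations into [min, max],
-  // i.e. a clipped ReLU with range [0, 2] here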
-  void* fc1_relu = tensorRelu2(fc1_bias_out, 0, 2);
-  printTensorDims(fc1_relu);
-  dumpWeightsToFile("tensors_out2/fc1_clipped_relu.out", fc1_relu);  
-
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  printTensorDims(fc2out);
-  dumpWeightsToFile("tensors_out2/fc2out.out", fc2out);  
-
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  printTensorDims(fc2_bias_out);
-
-  void* fc2_relu = tensorRelu2(fc2_bias_out, 0, 2);
-  printTensorDims(fc2_relu);
-
-  void* result = tensorSoftmax(fc2_relu);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-}
-
-
-
-int main(){
-
-  // This initializes the runtime - must be called before any tensor op
-  llvm_hpvm_initTensorRt(0);
-
-  test_2_Layer_clipped_FC();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2.cc
deleted file mode 100644
index d5211be3918adcd030fc40c13cba1ff0d7c53c18..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2.cc
+++ /dev/null
@@ -1,112 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenet2Arch(){
-
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Process the images in smaller batches - currently a single batch of 10000 images
-
-  int test_batch_size = 10000;  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet2_params/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet2_params/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet2_params/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet2_params/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet2_params/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet2_params/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet2_params/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet2_params/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  // Start power and performance profiling
-  startProfiling();
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  // NOTE: 'SAME' convolution
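-  // (for the 5x5 filters, pad = (5 - 1) / 2 = 2 keeps the spatial dims at 28x28)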
-  void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-
-  // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-  tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-  printTensorDims(conv1out);
-
-  void* conv1_reluout = tensorRelu(conv1out);
-  //dumpWeightsToFile("tensors_out/conv1_relu.out", conv1_reluout);  
-
-  void* pool1out = tensorPooling(conv1_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-  //dumpWeightsToFile("tensors_out/pool1.out", pool1out);  
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-  tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-  printTensorDims(conv2out);
-
-  void* conv2_reluout = tensorRelu(conv2out);
-  //dumpWeightsToFile("tensors_out/conv2.out", conv2_reluout);  
-
-  void* pool2out = tensorPooling(conv2_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-  //dumpWeightsToFile("tensors_out/maxpool2.out", pool2out);  
-  
-  void* gemm1out = tensorGemmGPU(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-  //dumpWeightsToFile("tensors_out/gemm1.out", gemm1out);  
-  
-  void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu1out = tensorRelu(gemm1biasout);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemmGPU(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-  
-  void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-  printTensorDims(gemm2_biasout);
-  
-  void* result = tensorSoftmax(gemm2_biasout);
-  printTensorDims(result);
-
-  // End profiling and dump output to profile.txt
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte",
-		  test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenet2Arch();
-
-  // Tear down the runtime before exit, matching the other test drivers
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2_promise.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2_promise.cc
deleted file mode 100644
index 358cb6a75b8e63ca0a0bd964c9f73f2d16c39b4f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet2_promise.cc
+++ /dev/null
@@ -1,113 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenet2Arch(){
-
-  printf("********* Lenet-2 Architecture ********** \n");
-
-  int test_batch_size = 10000;  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet2_params/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet2_params/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet2_params/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet2_params/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet2_params/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet2_params/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet2_params/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet2_params/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  // Start power and performance profiling
-  startProfiling();
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  // NOTE: 'SAME' convolution
-  void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-  dumpWeightsToFile("tensors_out/conv1_out.out", conv1out);  
-
-  tensorAdd(conv1out, conv1_bias);  // NOTE: In-place operation
-  printTensorDims(conv1out);
-
-  dumpWeightsToFile("tensors_out/conv1_bias_add.out", conv1out);  
-
-  void* conv1_reluout = tensorRelu(conv1out);
-  dumpWeightsToFile("tensors_out/conv1_relu.out", conv1_reluout);  
-
-  void* pool1out = tensorPooling(conv1_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-  dumpWeightsToFile("tensors_out/conv1_pool.out", pool1out);
-  
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-  tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-  printTensorDims(conv2out);
-
-  void* conv2_reluout = tensorRelu(conv2out);
-  //dumpWeightsToFile("tensors_out/conv2.out", conv2_reluout);  
-
-  void* pool2out = tensorPooling(conv2_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-  //dumpWeightsToFile("tensors_out/maxpool2.out", pool2out);  
-  
-  void* gemm1out = tensorGemmGPU(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-  //dumpWeightsToFile("tensors_out/gemm1.out", gemm1out);  
-  
-  void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu1out = tensorRelu(gemm1biasout);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemmGPU(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-  
-  void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-  printTensorDims(gemm2_biasout);
-  
-  void* result = tensorSoftmax(gemm2_biasout);
-  printTensorDims(result);
-
-  // End profiling and dump output to profile.txt
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte",
-		  test_batch_size, result);
-}
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenet2Arch();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet_acc.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet_acc.cc
deleted file mode 100644
index 42e364289e499d92591692a04e42988fd1a66dc5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test/test_lenet_acc.cc
+++ /dev/null
@@ -1,109 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../../tensor_runtime/include/tensor_runtime.h"
-#include "../../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenet2Arch(){
-
-  printf("********* Lenet-2 Architecture ********** \n");
-
-  int test_batch_size = 1000;  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the LeNet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_test_params/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_test_params/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_test_params/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_test_params/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_test_params/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_test_params/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_test_params/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_test_params/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-
-  // Start power and performance profiling
-  startProfiling();
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  // NOTE: 'SAME' convolution
-  void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-
-  // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-  tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-  printTensorDims(conv1out);
-
-  void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-  //dumpWeightsToFile("tensors_out/pool1.out", pool1out);  
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-  tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-  printTensorDims(conv2out);
-
-  //void* conv2_reluout = tensorRelu(conv2out);
-
-  void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-  //dumpWeightsToFile("tensors_out/maxpool2.out", pool2out);  
-  
-  void* gemm1out = tensorGemmGPU(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-  //dumpWeightsToFile("tensors_out/gemm1.out", gemm1out);  
-  
-  void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu1out = tensorRelu(gemm1biasout);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemmGPU(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-  
-  void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-  printTensorDims(gemm2_biasout);
-  
-  void* result = tensorSoftmax(gemm2_biasout);
-  printTensorDims(result);
-
-  // End profiling and dump output to profile.txt
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte",
-		  test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenet2Arch();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_alexnet.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_alexnet.cc
deleted file mode 100644
index a8129a1e459a15e26f595972724451e01d81b0a1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_alexnet.cc
+++ /dev/null
@@ -1,480 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-// FIXIT: Data allocations may need to be organized - Alexnet may consume more than available mem
-
-/*void testAlexnet2(){
-
-  struct Tensor* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 100, 3, 224, 224);
-  initTensorValues(input);
-
-  struct Tensor* conv1filter = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 96, 3, 11, 11);
-  printTensorDims(conv1filter);
-
-  /****** Start of Layer 1 ***************
-  
-  // NOTE: Padding for same conv is computed as P = (F - 1) / 2
-  struct Tensor* conv1out = tensorConvolution(input, conv1filter, 5, 5, 4, 4);
-  printTensorDims(conv1out);
-
-  struct Tensor* conv1bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 96, 1, 1);
-  struct Tensor* conv1bias_out = tensorAdd(conv1out, conv1bias); 
-  
-  struct Tensor* relu1out = tensorRelu(conv1bias_out);
-  
-  // NOTE: These parameters are a deviation from the original paper
-  // The parameters match the alexnet TF model
-  // TODO: Try removing LRN and measure - seems like impact of LRN may be minimal
-  unsigned int LRN_window = 5;
-  double LRN_alpha = 2e-05;
-  double LRN_beta = 0.75;
-  double LRN_k = 1.0; 
-  struct Tensor* lrn1out = tensorLRN(relu1out, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn1out);
-
-  struct Tensor* maxpool1out = tensorPooling(lrn1out, 0, 3, 3, 0, 0, 2, 2);
-  
-  /****** End of Conv Layer 1 **********
-
-  struct Tensor** splits = tensorSplit(maxpool1out, 2, 1);
- 
-  struct Tensor* conv2W = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 256, 48, 5, 5);
-  struct Tensor** conv2fils = tensorSplit(conv2W, 2, 0);
-
-  struct Tensor* conv2a_out = tensorConvolution(splits[0], conv2fils[0], 2, 2, 1, 1);
-  printTensorDims(conv2a_out);
-
-  struct Tensor* conv2b_out = tensorConvolution(splits[1], conv2fils[1], 2, 2, 1, 1);
-  printTensorDims(conv2b_out);
- 
-  struct Tensor* conv2_outs[2];
-  conv2_outs[0] = conv2a_out;
-  conv2_outs[1] = conv2b_out;
-
-  struct Tensor* conv2_concat_out = tensorConcat(conv2_outs, 2, 1);
-  printTensorDims(conv2_concat_out);
-
-  struct Tensor* conv2bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 256, 1, 1);
-  struct Tensor* conv2bias_out = tensorAdd(conv2_concat_out, conv2bias); 
-  struct Tensor* relu2out = tensorRelu(conv2bias_out);  
-  struct Tensor* lrn2out = tensorLRN(relu2out, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn2out);
-    
-  struct Tensor* maxpool2out = tensorPooling(lrn2out, 0, 3, 3, 0, 0, 2, 2);
-  printTensorDims(maxpool2out);
-
-  /******** End of Conv Layer 2 ************
-
-  struct Tensor* conv3filter = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 384, 256, 3, 3);
-  struct Tensor* conv3_out = tensorConvolution(maxpool2out, conv3filter, 1, 1, 1, 1);
-  
-  struct Tensor* conv3bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 384, 1, 1);
-  struct Tensor* conv3bias_out = tensorAdd(conv3_out, conv3bias); 
-  struct Tensor* relu3out = tensorRelu(conv3bias_out);  
-  printTensorDims(relu3out);
-
-  /********* End of Conv layer 3 ******
-
-  struct Tensor** splits2 = tensorSplit(relu3out, 2, 1);
-
-  struct Tensor* conv4W = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 384, 192, 3, 3);
-  struct Tensor** conv4fils = tensorSplit(conv4W, 2, 0);
-
-  printTensorDims(splits2[0]);
-  printTensorDims(conv4fils[0]);
-  
-  struct Tensor* conv4a_out = tensorConvolution(splits2[0], conv4fils[0], 1, 1, 1, 1);
-  printTensorDims(conv4a_out);
-
-  struct Tensor* conv4b_out = tensorConvolution(splits2[1], conv4fils[1], 1, 1, 1, 1);
-  printTensorDims(conv4b_out);
- 
-  struct Tensor* conv4_outs[2];
-  conv4_outs[0] = conv4a_out;
-  conv4_outs[1] = conv4b_out;
-
-  struct Tensor* conv4_concat_out = tensorConcat(conv4_outs, 2, 1);
-  printTensorDims(conv4_concat_out);
-
-  struct Tensor* conv4bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 384, 1, 1);
-  struct Tensor* conv4bias_out = tensorAdd(conv4_concat_out, conv4bias); 
-  struct Tensor* relu4out = tensorRelu(conv4bias_out);  
-  printTensorDims(relu4out);
-  
-  /********* End of Conv layer 4 ******
-
-  struct Tensor** splits3 = tensorSplit(relu4out, 2, 1);
-
-  struct Tensor* conv5W = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 256, 192, 3, 3);
-  struct Tensor** conv5fils = tensorSplit(conv5W, 2, 0);
-
-  printTensorDims(splits3[0]);
-  printTensorDims(conv5fils[0]);
-  
-  struct Tensor* conv5a_out = tensorConvolution(splits3[0], conv5fils[0], 1, 1, 1, 1);
-  printTensorDims(conv5a_out);
-
-  struct Tensor* conv5b_out = tensorConvolution(splits3[1], conv5fils[1], 1, 1, 1, 1);
-  printTensorDims(conv5b_out);
- 
-  struct Tensor* conv5_outs[2];
-  conv5_outs[0] = conv5a_out;
-  conv5_outs[1] = conv5b_out;
-
-  struct Tensor* conv5_concat_out = tensorConcat(conv5_outs, 2, 1);
-  printTensorDims(conv5_concat_out);
-
-  struct Tensor* conv5bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 256, 1, 1);
-  struct Tensor* conv5bias_out = tensorAdd(conv5_concat_out, conv5bias); 
-  struct Tensor* relu5out = tensorRelu(conv5bias_out);  
-  printTensorDims(relu5out);
-
-  struct Tensor* maxpool5out = tensorPooling(relu5out, 0, 3, 3, 0, 0, 2, 2);
-  printTensorDims(maxpool5out);
-
-  /********* End of Conv layer 5 ******
-
-  struct Tensor* fc1_weights = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-					      1, 1, 256*6*6, 4096);
-  struct Tensor* gemm1out = tensorGemm(maxpool5out, fc1_weights);  
-  printTensorDims(gemm1out);
-
-  struct Tensor* bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-				       1, 1, 1, 4096);
-  struct Tensor* gemm1biasout = tensorGemmBias(gemm1out, bias);
-  printTensorDims(gemm1biasout);
-
-  struct Tensor* relu6out = tensorRelu(gemm1biasout);  
-  printTensorDims(relu6out);
-
-  /***** End of FC1 layer ********
-
-  struct Tensor* fc2_weights = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-					      1, 1, 4096, 4096);
-  struct Tensor* gemm2out = tensorGemm(relu6out, fc2_weights);  
-  printTensorDims(gemm2out);
-
-  struct Tensor* bias2 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-				       1, 1, 1, 4096);
-  struct Tensor* gemm2biasout = tensorGemmBias(gemm2out, bias2);
-  printTensorDims(gemm2biasout);
-
-  struct Tensor* relu7out = tensorRelu(gemm2biasout);  
-  printTensorDims(relu7out);
-
-  /***** End of FC2 layer ********
-
-  struct Tensor* fc3_weights = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-					      1, 1, 4096, 1000);
-  struct Tensor* gemm3out = tensorGemm(relu7out, fc3_weights);  
-  printTensorDims(gemm3out);
-
-  struct Tensor* bias3 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-				       1, 1, 1, 1000);
-  struct Tensor* gemm3biasout = tensorGemmBias(gemm3out, bias3);
-  printTensorDims(gemm3biasout);
-
-  /******** End of FC3 Layer **********
-  struct Tensor* result = tensorSoftmax(gemm3biasout);
-  printTensorDims(result);
-  
-} */
-
-
-
-void printLikelihood(char* labels_file, int num_labels, void* result_ptr){
-
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t channels = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  
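-  // For each sample, scan the channel (class) dimension for the argmax and
-  // print its score; note that the labels_file and num_labels arguments are
-  // currently unused in this function.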
-  for(int i = 0; i < batch_dim; i++){
-    int chosen = 0;
-    for (int id = 1; id < channels; ++id){
-      if (data[i * channels + chosen] < data[i * channels + id]) chosen = id;
-    }
-
-    printf("** chosen = %d, label = %f, label+3 = %f \n",
-	   chosen, data[chosen], data[chosen+3]);   
-  }
-
-  //float accuracy = ((batch_dim - num_errors) * 1.0 / batch_dim * 1.0) * 100.0;
-  //printf("****** Accuracy = %f \n\n", accuracy);  
-}
-
-
-//--- Results not matching
-// *** CHECK:
-// 1) cudnnCrossCorrelation vs cudnnConvolution
-// 2) Weights
-// 3) Tensor outputs
-// 4) Data layouts
-
-
-
-/*** NOTE: REFERENCE ARCHITECTURE **/
-// FIXIT: Data allocations may need to be organized - Alexnet may consume more than available mem
-void testAlexnet3(){
-
-  int test_batch_size = 2;
-  int conv_mode = 1; // CROSS_CORRELATION matches the TF conv2d implementation
-  int conv_precision = 0; // floating point precision for convolution
- 
-  printf("****** AlexNet Architecture 3 ******** \n\n");
-  void* input = readTrainedWeights("../alexnet/params/combined_imgs.bin",
-				   CUDNN_DATA_FLOAT, test_batch_size, 3, 227, 227);
-  dumpWeightsToFile("tensors_out/input.out", input); 
-    
-  /****** Start of Layer 1 ****************/  
-  void* conv1filter = readTrainedWeights("../alexnet/params/conv1.bin",
-					 CUDNN_DATA_FLOAT, 96, 3, 11, 11);
-
-  printTensorDims(conv1filter);
-  dumpWeightsToFile("tensors_out/conv1filter.out", conv1filter); 
-  
-  // NOTE: the trained model does NOT have any padding in this conv
-  void* conv1out = tensorConvolution(input, conv1filter, 4, 4, 4, 4,
-				     conv_mode, conv_precision);
-  printTensorDims(conv1out);
-  
-  void* conv1bias = readTrainedWeights("../alexnet/params/conv1.bias.bin",
-				       CUDNN_DATA_FLOAT, 1, 96, 1, 1);
-  void* conv1bias_out = tensorAdd(conv1out, conv1bias);
-
-  dumpWeightsToFile("tensors_out/conv1_init.out", conv1out);
-   
-  void* relu1out = tensorRelu(conv1bias_out);
-  printTensorDims(relu1out);
-  dumpWeightsToFile("tensors_out/conv1.out", relu1out);
-  
-  // NOTE: These parameters deviate from the original paper; they match the
-  // alexnet TF model
-  // TODO: Try removing LRN and measure accuracy - the impact of LRN appears minimal
-  unsigned int LRN_window = 5;
-  double LRN_alpha = 2e-05 * LRN_window;
-  double LRN_beta = 0.75;
-  double LRN_k = 1.0;
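-
-  // For reference, cuDNN's cross-channel LRN presumably computes
-  //   dst = src / (k + (alpha / n) * sum_over_window(src^2))^beta,
-  // i.e. it divides alpha by the window size n internally, while TF's
-  // local_response_normalization does not - which would explain why
-  // LRN_alpha is pre-multiplied by LRN_window above (assumption).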
-
-  // TEST-point - Compare TF vs CUDNN
-  void* lrn1out = tensorLRN(relu1out, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn1out);
-  dumpWeightsToFile("tensors_out/lrn1.out", lrn1out);
-    
-  void* maxpool1out = tensorPooling(lrn1out, 0, 3, 3, 0, 0, 2, 2);
-  printTensorDims(maxpool1out);  
-  dumpWeightsToFile("tensors_out/maxpool1.out", maxpool1out);
-  
-  /****** End of Conv Layer 1 ***********/
-  
-  // TEST-point
-  void** splits = tensorSplit(maxpool1out, 2, 1);
-
-  void* concat_test1 = tensorConcat(splits, 2, 1);
-  compareTensors(maxpool1out, concat_test1);
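-  // Sanity check: splitting along the channel dim (dim 1) and concatenating
-  // back should reproduce maxpool1out exactly, so the comparison above is
-  // expected to report a match.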
-  
-  void* conv2W = readTrainedWeights("../alexnet/params/conv2.bin",
-				    CUDNN_DATA_FLOAT, 256, 48, 5, 5);
-
-  dumpWeightsToFile("tensors_out/conv2filter.out", conv2W); 
-  
-  // TEST point - compare split convolution between TF and cuDNN
-  void** conv2fils = tensorSplit(conv2W, 2, 0);
-
-  void* concat_test2 = tensorConcat(conv2fils, 2, 0);
-  compareTensors(conv2W, concat_test2);
-  
-  // NOTE: Padding for same conv is computed as P = ((F - 1) / 2)
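-  // e.g. F = 5 here gives P = (5 - 1) / 2 = 2, which preserves the spatial
-  // size at stride 1.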
-  void* conv2a_out = tensorConvolution(splits[0], conv2fils[0], 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv2a_out);
-
-  void* conv2b_out = tensorConvolution(splits[1], conv2fils[1], 2, 2, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv2b_out);
- 
-  void* conv2_outs[2];
-  conv2_outs[0] = conv2a_out;
-  conv2_outs[1] = conv2b_out;
-
-  // Test point
-  void* conv2_concat_out = tensorConcat(conv2_outs, 2, 1);
-  printTensorDims(conv2_concat_out);
-  dumpWeightsToFile("tensors_out/conv2_init.out", conv2_concat_out); 
-  
-  void* conv2bias = readTrainedWeights("../alexnet/params/conv2.bias.bin",
-				       CUDNN_DATA_FLOAT, 1, 256, 1, 1);  
-  void* conv2bias_out = tensorAdd(conv2_concat_out, conv2bias);
-  printTensorDims(conv2bias_out);
-
-  dumpWeightsToFile("tensors_out/conv2_bias_init.out", conv2bias_out); 
-
-  void* relu2out = tensorRelu(conv2bias_out);
-  dumpWeightsToFile("tensors_out/conv2.out", relu2out); 
-  printTensorDims(relu2out);
- 
-  void* lrn2out = tensorLRN(relu2out, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn2out);
-    
-  void* maxpool2out = tensorPooling(lrn2out, 0, 3, 3, 0, 0, 2, 2);
-  printTensorDims(maxpool2out);
-  
-  /******** End of Conv Layer 2 *************/
-
-  void* conv3filter = readTrainedWeights("../alexnet/params/conv3.bin",
-					 CUDNN_DATA_FLOAT, 384, 256, 3, 3);   
-  void* conv3_out = tensorConvolution(maxpool2out, conv3filter, 1, 1, 1, 1,
-				      conv_mode, conv_precision);
-  
-  void* conv3bias = readTrainedWeights("../alexnet/params/conv3.bias.bin",
-				       CUDNN_DATA_FLOAT, 1, 384, 1, 1);
-  void* conv3bias_out = tensorAdd(conv3_out, conv3bias); 
-  void* relu3out = tensorRelu(conv3bias_out);
-  dumpWeightsToFile("tensors_out/conv3.out", relu3out);  
-  printTensorDims(relu3out);
-
-  /********* End of Conv layer 3 *******/
-
-  void** splits2 = tensorSplit(relu3out, 2, 1);
-
-  void* conv4W = readTrainedWeights("../alexnet/params/conv4.bin",
-				    CUDNN_DATA_FLOAT, 384, 192, 3, 3);   
-  void** conv4fils = tensorSplit(conv4W, 2, 0);
-
-  printTensorDims(splits2[0]);
-  printTensorDims(conv4fils[0]);
-
-  // Test-point: does the pairing of splits and filters make sense?
-  void* conv4a_out = tensorConvolution(splits2[0], conv4fils[0], 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv4a_out);
-
-  void* conv4b_out = tensorConvolution(splits2[1], conv4fils[1], 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv4b_out);
- 
-  void* conv4_outs[2];
-  conv4_outs[0] = conv4a_out;
-  conv4_outs[1] = conv4b_out;
-
-  void* conv4_concat_out = tensorConcat(conv4_outs, 2, 1);
-  printTensorDims(conv4_concat_out);
-
-  void* conv4bias = readTrainedWeights("../alexnet/params/conv4.bias.bin",
-						  CUDNN_DATA_FLOAT, 1, 384, 1, 1);
-  void* conv4bias_out = tensorAdd(conv4_concat_out, conv4bias);
-
-  void* relu4out = tensorRelu(conv4bias_out);  
-  printTensorDims(relu4out);
-  
-  /********* End of Conv layer 4 *******/
-
-  void** splits3 = tensorSplit(relu4out, 2, 1);
-
-  void* conv5W = readTrainedWeights("../alexnet/params/conv5.bin",
-					     CUDNN_DATA_FLOAT, 256, 192, 3, 3);  
-  void** conv5fils = tensorSplit(conv5W, 2, 0);
-
-  printTensorDims(splits3[0]);
-  printTensorDims(conv5fils[0]);
-  
-  void* conv5a_out = tensorConvolution(splits3[0], conv5fils[0], 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv5a_out);
-
-  void* conv5b_out = tensorConvolution(splits3[1], conv5fils[1], 1, 1, 1, 1,
-				       conv_mode, conv_precision);
-  printTensorDims(conv5b_out);
- 
-  void* conv5_outs[2];
-  conv5_outs[0] = conv5a_out;
-  conv5_outs[1] = conv5b_out;
-
-  void* conv5_concat_out = tensorConcat(conv5_outs, 2, 1);
-  printTensorDims(conv5_concat_out);
-
-  void* conv5bias = readTrainedWeights("../alexnet/params/conv5.bias.bin",
-				       CUDNN_DATA_FLOAT, 1, 256, 1, 1);
-  void* conv5bias_out = tensorAdd(conv5_concat_out, conv5bias); 
-  void* relu5out = tensorRelu(conv5bias_out);  
-  printTensorDims(relu5out);
-
-  void* maxpool5out = tensorPooling(relu5out, 0, 3, 3, 0, 0, 2, 2);
-  printTensorDims(maxpool5out);
-
-  /********* End of Conv layer 5 *******/
-
-  // Test-point: I suspect the data may not be laid out correctly (either in file or after loading)
-  void* fc1_weights = readTrainedWeights("../alexnet/params/fc1.bin",
-					 CUDNN_DATA_FLOAT, 1, 1, 256*6*6, 4096);
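-
-  // For reference: maxpool5 presumably yields 256 feature maps of 6x6, so the
-  // flattened input to FC1 has 256*6*6 = 9216 elements, matching the
-  // 9216x4096 GEMM below.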
-  void* gemm1out = tensorGemm(maxpool5out, fc1_weights);  
-  printTensorDims(gemm1out);
-
-  void* bias = readTrainedWeights("../alexnet/params/fc1.bias.bin",
-		                           CUDNN_DATA_FLOAT, 1, 1, 1, 4096);
-    
-  void* gemm1biasout = tensorGemmBias(gemm1out, bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu6out = tensorRelu(gemm1biasout);  
-  printTensorDims(relu6out);
-
-  /***** End of FC1 layer *********/
-
-  void* fc2_weights = readTrainedWeights("../alexnet/params/fc2.bin",
-						  CUDNN_DATA_FLOAT, 1, 1, 4096, 4096);
-  void* gemm2out = tensorGemm(relu6out, fc2_weights);  
-  printTensorDims(gemm2out);
-
-  void* bias2 = readTrainedWeights("../alexnet/params/fc2.bias.bin",
-					    CUDNN_DATA_FLOAT, 1, 1, 1, 4096);
-  void* gemm2biasout = tensorGemmBias(gemm2out, bias2);
-  printTensorDims(gemm2biasout);
-
-  void* relu7out = tensorRelu(gemm2biasout);  
-  printTensorDims(relu7out);
-
-  /***** End of FC2 layer *********/
-
-  void* fc3_weights = readTrainedWeights("../alexnet/params/fc3.bin",
-						  CUDNN_DATA_FLOAT, 1, 1, 4096, 1000);  
-  void* gemm3out = tensorGemm(relu7out, fc3_weights);  
-  printTensorDims(gemm3out);
-
-  void* bias3 = readTrainedWeights("../alexnet/params/fc3.bias.bin",
-				            CUDNN_DATA_FLOAT, 1, 1, 1, 1000);
-  void* gemm3biasout = tensorGemmBias(gemm3out, bias3);
-  printTensorDims(gemm3biasout);
-
-  /******** End of FC3 Layer ***********/
-  void* result = tensorSoftmax(gemm3biasout);
-  printTensorDims(result);
-
-  // FIXIT: Pass file with the labels
-  printLikelihood("", test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-  printf("END of Alexnet3 -- \n");  
-}
-
-
-
-
-
-int main(){
-
-  // IMP-NOTE: Always initialize the runtime
-  initializeRuntime(0);
-
-  //testAlexnet1();
-  //testAlexnet2();
-  testAlexnet3(); 
-
-  return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_half.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_half.cc
deleted file mode 100644
index c0fee9b659db9ff45f56b75b989fbbed68523d43..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_half.cc
+++ /dev/null
@@ -1,74 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testFC_half(){
-
-  printf("********* Fully Connected DNN-1 ********* \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000; 
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-  					    float_type, test_batch_size, 1, 28, 28);  
-
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network2/fc1.bin",
-						  float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network2/fc1_bias.bin",
-					       float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network2/fc2.bin",
-						  float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network2/fc2_bias.bin",
-					       float_type, 1, 10, 1, 1);  
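-
-  // Network shape for reference: each 28x28 MNIST image is treated as a flat
-  // 784-vector, so the pipeline is 784 -> FC1 (784x128) -> ReLU ->
-  // FC2 (128x10) -> ReLU -> softmax over the 10 digit classes.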
-
-  // Start execution profiling of tensor ops
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorHgemm(input, fc1_weights);  
-  printTensorDims(fc1out);
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  printTensorDims(fc1_bias_out);
-
-  void* fc1_relu = tensorRelu(fc1_bias_out);
-  printTensorDims(fc1_relu);
-  
-  // Layer-2
-  void* fc2out = tensorHgemm(fc1_relu, fc2_weights);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  printTensorDims(fc2_bias_out);
-
-  void* fc2_relu = tensorRelu(fc2_bias_out);
-  printTensorDims(fc2_relu);
-
-  void* result = tensorSoftmax(fc2_relu);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte",
-		  test_batch_size, result);
-}
-
-
-
-int main(){
-
-  // This initializes the runtime - must be called before any tensor operation
-  llvm_hpvm_initTensorRt(0);
-
-  testFC_half();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network.cc
deleted file mode 100644
index e8b70146a10359bf2df7420ae388325e6a658557..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network.cc
+++ /dev/null
@@ -1,152 +0,0 @@
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-
-void testFCNetworkArchCPU(){
-
-  printf("********* Fully Connected DNN-1 ********* \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;
- 
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-  					    float_type, test_batch_size, 1, 28, 28);  
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network2/fc1.bin",
-						  float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network2/fc1_bias.bin",
-					       float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network2/fc2.bin",
-						  float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network2/fc2_bias.bin",
-					       float_type, 1, 10, 1, 1);  
-
-  //dumpWeightsToFile("tensors_out/input_fc.out", input);
-  //dumpWeightsToFile("tensors_out/fc1_w_fc.out", fc1_weights);  
-
-  printTensorDims(input);
-  printTensorDims(fc1_weights);
-
-  // Start profiling tensor ops
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmCPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  //dumpWeightsToFile("tensors_out/fc1out_fc.out", fc1out);  
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  //dumpWeightsToFile("tensors_out/fc1_biasout_fc.out", fc1_bias_out);  
-  printTensorDims(fc1_bias_out);
-
-  void* fc1_relu = tensorRelu(fc1_bias_out);
-  //dumpWeightsToFile("tensors_out/fc1_relu_fc.out", fc1_relu);  
-  printTensorDims(fc1_relu);
- 
-  // Layer-2
-  void* fc2out = tensorGemmCPU(fc1_relu, fc2_weights);  
-  //dumpWeightsToFile("tensors_out/fc2out_fc.out", fc2out);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  //dumpWeightsToFile("tensors_out/fc2_biasout_fc.out", fc2_bias_out);  
-  printTensorDims(fc2_bias_out);
-
-  void* fc2_relu = tensorRelu(fc2_bias_out);
-  //dumpWeightsToFile("tensors_out/fc2_relu_fc.out", fc2_relu);  
-  printTensorDims(fc2_relu);
-
-  void* result = tensorSoftmax(fc2_relu);
-  printTensorDims(result);
-
-  // Stop profiling tensor ops
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testFCNetworkArchGPU(){
-
-  printf("********* Fully Connected DNN-1 ********* \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000; 
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-  					    float_type, test_batch_size, 1, 28, 28);  
-
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network2/fc1.bin",
-						  float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network2/fc1_bias.bin",
-					       float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network2/fc2.bin",
-						  float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network2/fc2_bias.bin",
-					       float_type, 1, 10, 1, 1);  
-
-  // Start execution profiling of tensor ops
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  //dumpWeightsToFile("tensors_out/fc1_biasout_fc.out", fc1_bias_out);  
-  printTensorDims(fc1_bias_out);
-
-  void* fc1_relu = tensorRelu(fc1_bias_out);
-  //dumpWeightsToFile("tensors_out/fc1_relu_fc.out", fc1_relu);  
-  printTensorDims(fc1_relu);
-
-  // IMPORTANT: Optionally inject errors into the FC1 layer output (disabled)
-  //tensorAddError(fc1_relu, 3);
- 
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  //dumpWeightsToFile("tensors_out/fc2out_fc.out", fc2out);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  //dumpWeightsToFile("tensors_out/fc2_biasout_fc.out", fc2_bias_out);  
-  printTensorDims(fc2_bias_out);
-
-  void* fc2_relu = tensorRelu(fc2_bias_out);
-  //dumpWeightsToFile("tensors_out/fc2_relu_fc.out", fc2_relu);  
-  printTensorDims(fc2_relu);
-
-  void* result = tensorSoftmax(fc2_relu);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-
-int main(){
-
-  // This initializes the runtime - must be called before any tensor operation
-  llvm_hpvm_initTensorRt(0);
-
-  //testFCNetworkArchCPU();
-
-  testFCNetworkArchGPU();
-
-  llvm_hpvm_cleanupTensorRt();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network2.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network2.cc
deleted file mode 100644
index fc00532a1b3712fab9d098a9a8e1a1586f1458a5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network2.cc
+++ /dev/null
@@ -1,94 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-void test4LayerFC(){
-
-  printf("********* 4-layer FC Network ********* \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				float_type,
-				test_batch_size, 1, 28, 28);    
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network1/fc1.bin",
-					 float_type, 1, 1, 784, 1000);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network1/fc1_bias.bin",
-				      float_type, 1, 1000, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network1/fc2.bin",
-						  float_type, 1, 1, 1000, 500);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network1/fc2_bias.bin",
-					       float_type, 1, 500, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/FC_network1/fc3.bin",
-						  float_type, 1, 1, 500, 200);  
-  void* fc3_bias = readTrainedWeights("../model_params/FC_network1/fc3_bias.bin",
-					       float_type, 1, 200, 1, 1);  
-  void* fc4_weights = readTrainedWeights("../model_params/FC_network1/fc4.bin",
-						  float_type, 1, 1, 200, 10);  
-  void* fc4_bias = readTrainedWeights("../model_params/FC_network1/fc4_bias.bin",
-					       float_type, 1, 10, 1, 1);  
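-
-  // Layer widths for reference: 784 -> 1000 -> 500 -> 200 -> 10. Note that no
-  // activation is applied between layers in this network, so the four GEMMs
-  // below compose linearly up to the final softmax.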
-
-  //dumpWeightsToFile("tensors_out/input_fc.out", input);
-  //dumpWeightsToFile("tensors_out/fc1_w_fc.out", fc1_weights);  
-
-  // Start Profiling execution times of Tensor operations
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  //dumpWeightsToFile("tensors_out/fc1out_fc.out", fc1out);  
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  printTensorDims(fc1_bias_out);
-  //dumpWeightsToFile("tensors_out/fc_fc1.out", fc1_bias_out);
- 
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_bias_out, fc2_weights);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  printTensorDims(fc2_bias_out);
-
-  // Layer-3
-  void* fc3out = tensorGemmGPU(fc2_bias_out, fc3_weights);  
-  printTensorDims(fc3out);
-  
-  void* fc3_bias_out = tensorAdd(fc3out, fc3_bias);
-  printTensorDims(fc3_bias_out);
-
-  // Layer-4
-  void* fc4out = tensorGemmGPU(fc3_bias_out, fc4_weights);  
-  printTensorDims(fc4out);
-  
-  void* fc4_bias_out = tensorAdd(fc4out, fc4_bias);
-  printTensorDims(fc4_bias_out);
- 
-  void* result = tensorSoftmax(fc4_bias_out);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-} 
-
-
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  test4LayerFC();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network3.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network3.cc
deleted file mode 100644
index 531bb01695cddb70de0f9bea90f6b229679e9bce..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_fc_network3.cc
+++ /dev/null
@@ -1,93 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-
-
-void test4LayerFC(){
-
-  printf("********* 4-layer FC Network ********* \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				float_type,
-				test_batch_size, 1, 28, 28);    
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network3/fc1.bin",
-					 float_type, 1, 1, 784, 512);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network3/fc1_bias.bin",
-				      float_type, 1, 512, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network3/fc2.bin",
-					 float_type, 1, 1, 512, 256);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network3/fc2_bias.bin",
-				      float_type, 1, 256, 1, 1);  
-  void* fc3_weights = readTrainedWeights("../model_params/FC_network3/fc3.bin",
-					 float_type, 1, 1, 256, 128);  
-  void* fc3_bias = readTrainedWeights("../model_params/FC_network3/fc3_bias.bin",
-				      float_type, 1, 128, 1, 1);  
-  void* fc4_weights = readTrainedWeights("../model_params/FC_network3/fc4.bin",
-					 float_type, 1, 1, 128, 10);  
-  void* fc4_bias = readTrainedWeights("../model_params/FC_network3/fc4_bias.bin",
-				      float_type, 1, 10, 1, 1);  
-
-  // Start Profiling execution times of Tensor operations
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  printTensorDims(fc1out);
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  printTensorDims(fc1_bias_out);
-  void* fc1_relu = tensorRelu(fc1_bias_out);
- 
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  printTensorDims(fc2out);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  printTensorDims(fc2_bias_out);
-  void* fc2_relu = tensorRelu(fc2_bias_out);
-
-  // Layer-3
-  void* fc3out = tensorGemmGPU(fc2_relu, fc3_weights);  
-  printTensorDims(fc3out);
-  
-  void* fc3_bias_out = tensorAdd(fc3out, fc3_bias);
-  printTensorDims(fc3_bias_out);
-  void* fc3_relu = tensorRelu(fc3_bias_out);
-
-  // Layer-4
-  void* fc4out = tensorGemmGPU(fc3_relu, fc4_weights);  
-  printTensorDims(fc4out);
-  
-  void* fc4_bias_out = tensorAdd(fc4out, fc4_bias);
-  printTensorDims(fc4_bias_out);  
-  void* fc4_relu = tensorRelu(fc4_bias_out);
- 
-  void* result = tensorSoftmax(fc4_relu);
-  printTensorDims(result);
-
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-} 
-
-
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  test4LayerFC();
-
-  llvm_hpvm_cleanupTensorRt();
-
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet.cc
deleted file mode 100644
index e21b09fbf59c6ceee2adcf6df798ef04351a03ef..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet.cc
+++ /dev/null
@@ -1,178 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenetArch2(){
-
-  printf("********* Lenet Architecture ********** \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_params/conv1.bin",
-					  CUDNN_DATA_FLOAT, 20, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_params/conv1.bias.bin",
-					CUDNN_DATA_FLOAT, 1, 20, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_params/conv2.bin",
-					  CUDNN_DATA_FLOAT, 50, 20, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_params/conv2.bias.bin",
-					CUDNN_DATA_FLOAT, 1, 50, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_params/ip1.bin",
-					 CUDNN_DATA_FLOAT, 1, 1, 800, 500);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet_params/ip1.bias.bin",
-				      CUDNN_DATA_FLOAT, 1, 1, 1, 500);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_params/ip2.bin",
-					 CUDNN_DATA_FLOAT, 1, 1, 500, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet_params/ip2.bias.bin",
-				      CUDNN_DATA_FLOAT, 1, 1, 1, 10);  
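-
-  // Shape walk-through for reference (valid convolutions, 2x2 pools with
-  // stride 2): 28x28 -> conv 5x5 -> 24x24x20 -> pool -> 12x12x20 -> conv 5x5
-  // -> 8x8x50 -> pool -> 4x4x50, and 4*4*50 = 800 matches the 800x500 ip1
-  // weight matrix loaded above.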
-
-
-  // Start power and performance profiling
-  startProfiling();
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  void* conv1out = tensorConvolution(input, conv1_filter, 0, 0, 1, 1,
-				     conv_mode, conv_precision);
-  // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-  tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-  printTensorDims(conv1out);
-
-  void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 0, 0, 1, 1,
-				     conv_mode, conv_precision);
-  tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-
-  printTensorDims(conv2out);
-
-  void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-   
-  void* gemm1out = tensorGemm(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-  
-  void* gemm1biasout = tensorGemmBias(gemm1out, fc1_bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu1out = tensorRelu(gemm1biasout);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemm(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-  
-  void* gemm2_biasout = tensorGemmBias(gemm2out, fc2_bias);
-  printTensorDims(gemm2_biasout);
-  
-  void* result = tensorSoftmax(gemm2_biasout);
-  printTensorDims(result);
-
-  // End profiling and dump output to profile.txt
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-/* This architecture REMOVES the bias adds */
-void testLenetArch3(){
-
-  printf("********* Lenet Architecture ********** \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;
-  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-					 CUDNN_DATA_FLOAT,
-					 test_batch_size, 1, 28, 28);
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet_params/conv1.bin",
-						   CUDNN_DATA_FLOAT, 20, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet_params/conv1.bias.bin",
-						 CUDNN_DATA_FLOAT, 1, 20, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet_params/conv2.bin",
-						   CUDNN_DATA_FLOAT, 50, 20, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet_params/conv2.bias.bin",
-						 CUDNN_DATA_FLOAT, 1, 50, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet_params/ip1.bin",
-						  CUDNN_DATA_FLOAT, 1, 1, 800, 500);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet_params/ip2.bin",
-						  CUDNN_DATA_FLOAT, 1, 1, 500, 10);  
-
-  /* Convolution specific parameters */
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-  
-  void* conv1out = tensorConvolution(input, conv1_filter, 0, 0, 1, 1,
-				     conv_mode, conv_precision);
-  // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-  tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-  printTensorDims(conv1out);
-
-  void* pool1out = tensorPooling(conv1out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 0, 0, 1, 1,
-				     conv_mode, conv_precision);
-  printTensorDims(conv2out);
-
-  void* pool2out = tensorPooling(conv2out, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-   
-  void* gemm1out = tensorGemm(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-
-  void* relu1out = tensorRelu(gemm1out);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemm(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-
-  void* result = tensorSoftmax(gemm2out);
-  printTensorDims(result);
-	 
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-  printf("END of Lenet Arch3 -- \n");
-}
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  //testTensorAdd();
-  //testTensorConv();
-  //testTensorPool();
-  //testTensorGemm();
-  //testTensorGemmBias();
-  //testTensorRelu();
-  //testTensorSoftmax();
-
-  //testLenetArch();
-  testLenetArch2();
-  //testLenetArch3();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet2.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet2.cc
deleted file mode 100644
index 77f256320e158fb13555e83d0fbe260ce9d3a83f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_lenet2.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testLenet2Arch(){
-
-  printf("********* Lenet-2 Architecture ********** \n");
-  // FIXIT: Make the test batch size configurable - currently hardcoded to 10000 images
-
-  int test_batch_size = 10000;  
-  void* input = readInputTensor("../model_params/lenet_params/datasets/t10k-images-idx3-ubyte",
-				CUDNN_DATA_FLOAT,
-				test_batch_size, 1, 28, 28);
-
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  // IMP: The output channel counts match the trained model - not the Lenet arch proposed in Andrew Ng's class
-  void* conv1_filter = readTrainedWeights("../model_params/lenet2_params/conv1.bin",
-					  float_type, 32, 1, 5, 5);    
-  void* conv1_bias = readTrainedWeights("../model_params/lenet2_params/conv1_bias.bin",
-					float_type, 1, 32, 1, 1);  
-  void* conv2_filter = readTrainedWeights("../model_params/lenet2_params/conv2.bin",
-					  float_type, 64, 32, 5, 5);  
-  void* conv2_bias = readTrainedWeights("../model_params/lenet2_params/conv2_bias.bin",
-					float_type, 1, 64, 1, 1);  
-  void* fc1_weights = readTrainedWeights("../model_params/lenet2_params/fc1.bin",
-					 float_type, 1, 1, 7*7*64, 1024);  
-  void* fc1_bias = readTrainedWeights("../model_params/lenet2_params/fc1_bias.bin",
-				      float_type, 1, 1024, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/lenet2_params/fc2.bin",
-					 float_type, 1, 1, 1024, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/lenet2_params/fc2_bias.bin",
-				      float_type, 1, 10, 1, 1);  
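-
-  // Shape walk-through for reference: both convolutions below use 'SAME'
-  // padding (pad 2 for the 5x5 filters), so spatial dims shrink only at the
-  // two 2x2/stride-2 pools: 28x28 -> 14x14 -> 7x7, and 7*7*64 = 3136 matches
-  // the fc1 weight matrix loaded above.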
-
-
-  // Start power and performance profiling
-  startProfiling();
-  
-  int conv_mode = 1; // NOTE: using CROSS_CORRELATION
-  int conv_precision = 0; // NOTE: using Float as compute precision. FIXIT: use enum
-
-  // NOTE: 'SAME' convolution
-  void* conv1out = tensorConvolution(input, conv1_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-
-  // NOTE: For tensorAdd, the only dimension that MUST match is channels  
-  tensorAdd(conv1out, conv1_bias); // NOTE: In place operation
-  printTensorDims(conv1out);
-
-  void* conv1_reluout = tensorRelu(conv1out);
-  //dumpWeightsToFile("tensors_out/conv1.out", conv1_reluout);  
-
-  void* pool1out = tensorPooling(conv1_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool1out);
-  // NOTE: input channels have to match between tensor op inputs and outputs 
-  void* conv2out = tensorConvolution(pool1out, conv2_filter, 2, 2, 1, 1,
-				     conv_mode, conv_precision);
-  tensorAdd(conv2out, conv2_bias); // NOTE: In place operation
-  printTensorDims(conv2out);
-
-  void* conv2_reluout = tensorRelu(conv2out);
-  //dumpWeightsToFile("tensors_out/conv2.out", conv2_reluout);  
-
-  void* pool2out = tensorPooling(conv2_reluout, 0, 2, 2, 0, 0, 2, 2);
-  printTensorDims(pool2out);
-  //dumpWeightsToFile("tensors_out/maxpool2.out", pool2out);  
-  
-  void* gemm1out = tensorGemmGPU(pool2out, fc1_weights);  
-  printTensorDims(gemm1out);
-  //dumpWeightsToFile("tensors_out/gemm1.out", gemm1out);  
-  
-  void* gemm1biasout = tensorAdd(gemm1out, fc1_bias);
-  printTensorDims(gemm1biasout);
-
-  void* relu1out = tensorRelu(gemm1biasout);
-  printTensorDims(relu1out);
-  
-  void* gemm2out = tensorGemmGPU(relu1out, fc2_weights);  
-  printTensorDims(gemm2out);
-  
-  void* gemm2_biasout = tensorAdd(gemm2out, fc2_bias);
-  printTensorDims(gemm2_biasout);
-  
-  void* result = tensorSoftmax(gemm2_biasout);
-  printTensorDims(result);
-
-  // End profiling and dump output to profile.txt
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte",
-		  test_batch_size, result);
-  // THINK: I believe that comparing the results does not need to be part of the HPVM graph
-}
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  testLenet2Arch();
-  
-  return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_ops.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_ops.cc
deleted file mode 100644
index 43151dd965841861b1535c3dc3ad53c0335eeeb7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/test_ops.cc
+++ /dev/null
@@ -1,557 +0,0 @@
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-
-
-void testTensorGemm(){
-
-  printf("***** TensorSgemm ***** \n\n");
-  void* lhs_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 5, 4, 1, 1);
-  struct Tensor* lhs = (struct Tensor*) lhs_ptr;
-  fillTensorWithOnes(lhs);
-  
-  float* data_arr = (float*) lhs->host_data;
-  for(int i = 0; i < lhs->num_elems; i++){
-    data_arr[i] = (i / 4) + 1;
-  }
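-  // lhs is a 5x4 matrix whose row r (0-based) holds the constant r+1, so
-  // multiplying by the all-ones 4x3 rhs below should fill row r of the
-  // output with 4*(r+1): 4, 8, 12, 16, 20 down the rows.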
-  
-  void* rhs = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 1, 4, 3);
-  fillTensorWithOnes(rhs);
-  
-  void* output = tensorGemmCPU(lhs, rhs);   
-  printTensorValues(output);
-
-  void* bias_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  struct Tensor* bias = (struct Tensor*) bias_ptr;
-  fillTensorWithOnes(bias);
-
-  float* bias_arr = (float*) bias->host_data;
-  for(int i = 0; i < bias->num_elems; i++){
-    bias_arr[i] = i + 1;
-  }
-  
-  void* output2 = tensorAdd(output, bias);
-  printTensorValues(output2);
-}
-
-
-void testTensorHgemm(){
-
-  printf("***** TensorHgemm ***** \n\n");
-  void* lhs_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 5, 4, 1, 1);
-  struct Tensor* lhs = (struct Tensor*) lhs_ptr;
-  fillTensorWithOnes(lhs);
-  
-  float* data_arr = (float*) lhs->host_data;
-  for(int i = 0; i < lhs->num_elems; i++){
-    data_arr[i] = (i / 4) + 1;
-  }
-  
-  void* rhs = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 1, 4, 3);
-  fillTensorWithOnes(rhs);
-  
-  void* output = tensorHalfGemm(lhs, rhs);   
-  printTensorValues(output);
-
-  void* bias_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  struct Tensor* bias = (struct Tensor*) bias_ptr;
-  fillTensorWithOnes(bias);
-
-  float* bias_arr = (float*) bias->host_data;
-  for(int i = 0; i < bias->num_elems; i++){
-    bias_arr[i] = i + 1;
-  }
-  
-  void* output2 = tensorAdd(output, bias);
-  printTensorValues(output2);
-}
-
-
-void testTensorHgemm2(){
-
-  printf("***** TensorHgemm ***** \n\n");
-  void* lhs_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-				 10000, 800, 1, 1);
-  struct Tensor* lhs = (struct Tensor*) lhs_ptr;
-  
-  float* data_arr = (float*) lhs->host_data;
-  for(int i = 0; i < lhs->num_elems; i++){
-    data_arr[i] = (i / 4) + 1;
-  }
-  
-  void* rhs = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-			     1, 1, 800, 800);
-  fillTensorWithOnes(rhs);
-  
-  void* output = tensorHalfGemm(lhs, rhs);
-  //printTensorValues(output);
-}
-
-
-void testTensorSgemm2(){
-
-  printf("***** TensorSgemm ***** \n\n");
-  void* lhs_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-				 10000, 800, 1, 1);
-  struct Tensor* lhs = (struct Tensor*) lhs_ptr;
-  
-  float* data_arr = (float*) lhs->host_data;
-  for(int i = 0; i < lhs->num_elems; i++){
-    data_arr[i] = (i / 4) + 1;
-  }
-  
-  void* rhs = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW,
-			     1, 1, 800, 800);
-  fillTensorWithOnes(rhs);
-  
-  void* output = tensorGemmGPU(lhs, rhs);
-  //printTensorValues(output);
-}
-
-
-
-void testTensorGemmGPU(){
-
-  printf("***** TensorSgemm ***** \n\n");
-  void* lhs_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 5, 4, 1, 1);
-  struct Tensor* lhs = (struct Tensor*) lhs_ptr;
-  fillTensorWithOnes(lhs);
-
-  float* data_arr = (float*) lhs->host_data;
-  for(int i = 0; i < lhs->num_elems; i++){
-    data_arr[i] = (i / 4) + 1;
-  }
-  
-  void* rhs = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 1, 4, 3);
-  fillTensorWithOnes(rhs);
-  
-  void* output = tensorGemmGPU(lhs, rhs);   
-  printTensorValues(output);
-
-  void* bias_ptr = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  struct Tensor* bias = (struct Tensor*) bias_ptr;
-  fillTensorWithOnes(bias);
-
-  float* bias_arr = (float*) bias->host_data;
-  for(int i = 0; i < bias->num_elems; i++){
-    bias_arr[i] = i + 1;
-  }
-  
-  void* output2 = tensorAdd(output, bias);
-  printTensorValues(output2);
- 
-}
-
-
-
-void testTensorGemmBias(){
-
-  // NOTE: 2nd dim of bias and d2*d3*d4 for the input tensor MUST match 
-  printf("***** TensorGemmBias ***** \n\n");
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 1, 2, 2);
-  fillTensorWithOnes(input); 
-  void* bias = create2DTensor(CUDNN_DATA_FLOAT, 1, 4);
-  fillTensorWithOnes(bias);
-
-  void* output = tensorGemmBias(input, bias);
-  printTensorValues(output);
-}
-
-
-
-void testTensorConv2(){
-
-  int conv_mode = 1;  // CROSS_CORRELATION mode
-  int compute_precision = 0; // floating point precision 
-  
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 3, 3);
-  fillWithOnesAndTwos(input);
-  void** splits = tensorSplit(input, 2, 1);
-
-  void* conv2W = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 1, 2, 2);
-  fillTensorWithOnes(conv2W);
-		     
-  void** conv2fils = tensorSplit(conv2W, 2, 0);
-
-  void* conv2a_out = tensorConvolution(splits[0], conv2fils[0], 0, 0,
-				       1, 1, conv_mode, compute_precision);
-  printTensorDims(conv2a_out);
-
-  void* conv2b_out = tensorConvolution(splits[1], conv2fils[1], 0, 0,
-				       1, 1, conv_mode, compute_precision);
-  printTensorDims(conv2b_out);
- 
-  void* conv2_outs[2];
-  conv2_outs[0] = conv2a_out;
-  conv2_outs[1] = conv2b_out;
-
-  void* conv2_concat_out = tensorConcat(conv2_outs, 2, 1);
-  printTensorDims(conv2_concat_out);
-  printTensorValues(conv2_concat_out);
-  
-}
-
-
-
-void testTensorConv3(){
-
-  int conv_mode = 1;  // CROSS_CORRELATION mode
-  int compute_precision = 0; // floating point precision
-  
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 96, 28, 28);
-  fillTensorWithOnes(input);
-  void** splits = tensorSplit(input, 2, 1);
-
-  void* conv2W = readTrainedWeights("../alexnet/params/conv2.bin",
-				    CUDNN_DATA_FLOAT, 256, 48, 5, 5);
-  		     
-  void** conv2fils = tensorSplit(conv2W, 2, 0);
-
-  void* conv2a_out = tensorConvolution(splits[0], conv2fils[0], 2, 2,
-				       1, 1, conv_mode, compute_precision);
-  printTensorDims(conv2a_out);
-
-  void* conv2b_out = tensorConvolution(splits[1], conv2fils[1], 2, 2,
-				       1, 1, conv_mode, compute_precision);
-  printTensorDims(conv2b_out);
- 
-  void* conv2_outs[2];
-  conv2_outs[0] = conv2a_out;
-  conv2_outs[1] = conv2b_out;
-
-  void* conv2_concat_out = tensorConcat(conv2_outs, 2, 1);
-  printTensorDims(conv2_concat_out);
-  //printTensorValues(conv2_concat_out);
-  dumpWeightsToFile("tensors_out/conv2_test.out", conv2_concat_out); 
-
-  void* conv2bias = readTrainedWeights("../alexnet/params/conv2.bias.bin",
-						  CUDNN_DATA_FLOAT, 1, 256, 1, 1);  
-  void* conv2bias_out = tensorAdd(conv2_concat_out, conv2bias);
-  printTensorDims(conv2bias_out);
-  
-  dumpWeightsToFile("tensors_out/conv2_bias_test.out", conv2bias_out); 
-
-}
-
-
-
-
-
-
-
-void testLRN(){
-
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 20, 20, 20, 20);
-  fillTensorWithOnes(input);
-
-  unsigned LRN_window = 5;
-  double LRN_alpha = 2e-05;
-  printf("LRN_alpha = %f \n", LRN_alpha);
-  
-  double LRN_beta = 0.75;
-  double LRN_k = 1.0;
-
-  // TEST-point - Compare TF vs CUDNN
-  void* lrn1out = tensorLRN(input, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn1out);
-  dumpWeightsToFile("tensors_out/lrn1_test.out", lrn1out);
-
-  void* input2 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 7, 7, 7, 7);
-  fillTensorWithOnes(input2);
-
-  LRN_window = 5;
-  LRN_alpha = 0.5 * LRN_window;
-  
-  LRN_beta = 0.75;
-  LRN_k = 1.0;
-
-  void* lrn2out = tensorLRN(input2, LRN_window, LRN_alpha, LRN_beta, LRN_k);
-  printTensorDims(lrn2out);
-  dumpWeightsToFile("tensors_out/lrn2_test.out", lrn2out); 
-}
-
-
-
-
-void testTensorAdd(){
-
-  // Tensor add with equal dimensions
-  void* x = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 2, 2);
-  void* bias = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 2, 2);
-  fillTensorWithOnes(x);
-  fillTensorWithOnes(bias);
-
-  printTensorValues(x);
-  printTensorValues(bias);
-
-  tensorAdd(x, bias);
-  printTensorValues(x);
-
-  // Tensor add with matching channel dimension
-  void* x2 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 2, 2, 2);
-  void* bias2 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 2, 1, 1);
-  fillTensorWithOnes(x2);
-  fillTensorWithOnes(bias2);
-
-  tensorAdd(x2, bias2);
-  printTensorValues(x2);
-}
-
-
-void testTensorError(){
-
-  // Fill a tensor with a constant value and inject simulated error
-  void* x = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 2, 128);
-  fillTensorWithOnes(x);
-
-  Tensor* x_tensor = (Tensor*) x;
-  float* data_arr = (float*) x_tensor->host_data;
-  for(int i = 0; i < x_tensor->num_elems; i++){
-    data_arr[i] = 0.2;
-  }
-  
-  tensorAddError(x, 3);
-  printTensorValues(x);
-}
-
-
-void testTensorConv(){
-
-  // NOTE: The input channel count value (param2 to Tensor and Filter) must be the same
-  void* x3 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 2, 4, 4);
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  void* filter = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 2, 2);
-  fillTensorWithOnes(x3);
-  fillTensorWithOnes(filter);
-
-  int conv_mode = 1; // NOTE: uses CROSS_CORRELATION
-  int compute_precision = 0; // floating point precision for conv
-  
-  void* conv1 = tensorConvolution(x3, filter, 0, 0,
-				  1, 1, conv_mode, compute_precision);
-  printTensorValues(conv1);
-
-}
-
-
-void testTensorGroupedConv(){
-
-  // NOTE: The input channel count value (param2 to Tensor and Filter) must be the same
-  void* x3 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 2, 4, 4);
-  // NOTE: Filter descriptors do NOT have batch size
-  // NOTE: First two dims are output channels (configurable) and input channels (MUST match the input tensor's channel count)
-  void* filter = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 1, 3, 3);
-  fillTensorWithOnes(x3);
-  fillTensorWithOnes(filter);
-
-  int conv_mode = 1; // NOTE: uses CROSS_CORRELATION
-  int conv_groups = 2;
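-  // With conv_groups = 2 and a (2,1,3,3) filter, each of the two input
-  // channels is presumably convolved with its own single-channel 3x3 kernel
-  // (i.e. a depthwise convolution) rather than summing across channels.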
-  
-  void* conv1 = tensorConvolution(x3, filter, 2, 2,
-				  2, 2, conv_mode, conv_groups);
-  printTensorValues(conv1);
-
-  // NOTE: For cudnnTensorAdd, the only dimension that MUST match is channels  
-  //void* bias3 = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  // fillTensorWithOnes(bias3);
-  //tensorAdd(conv1, bias3);
-  //printTensorValues(conv1);
-}
-
-
-void testTensorPool(){
-  void* x = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 1, 4, 4);
-  fillTensorWithOnes(x); 
-  void* output = tensorPooling(x, 0, 2, 2, 0, 0, 1, 1);
-  printTensorValues(output);
-}
-
-
-void testTensorBatchNorm(){
-
-  void* x = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 2, 2);
-  fillTensorWithVal(x, 3);
-
-  void* gamma = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  fillTensorWithVal(gamma, 1);
-
-  void* beta = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  fillTensorWithVal(beta, 0);
-
-  void* mean = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  fillTensorWithVal(mean, 1);
-
-  void* variance = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 1, 3, 1, 1);
-  fillTensorWithVal(variance, 1);
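-
-  // Expected output for reference: batch norm computes
-  //   out = gamma * (x - mean) / sqrt(variance + epsilon) + beta,
-  // so with x=3, mean=1, variance=1, gamma=1, beta=0, epsilon=0.01 every
-  // element should be 2 / sqrt(1.01) ~= 1.99.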
-
-  
-  void* output = tensorBatchNorm(x, gamma, beta, mean, variance, 0.01);  
-  printTensorValues(output);  
-}
-
-
-void testTensorRelu(){
-
-  printf("***** TensorRelu ***** \n\n");
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 1, 2, 2);
-  fillTensorWithNegOnes(input);
-
-  void* output = tensorRelu(input);
-  printTensorValues(output);
-}
-
-
-void testTensorSoftmax(){
-
-  printf("***** TensorSoftmax ***** \n\n");
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 4, 1, 1);
-
-  float* host_ptr = (float*) ((struct Tensor*) input)->host_data;
-  host_ptr[0] = 0.1;
-  host_ptr[1] = 0.2;
-  host_ptr[2] = 0.3;
-  host_ptr[3] = 0.4;
-  host_ptr[4] = 0.5;
-  host_ptr[5] = 0.6;
-  host_ptr[6] = 0.7;
-  host_ptr[7] = 2.5;
-
-  void* output = tensorSoftmax(input);
-  printTensorValues(output);
-}
-
-
-void testSoftmaxOutput(void* output_ptr){
-
-  struct Tensor* output = (struct Tensor*) output_ptr;
-  
-  size_t batch_dim = output->dims.dim_sizes[0];
-  size_t channels = output->dims.dim_sizes[1];
-
-  float* data = (float*) output->host_data;
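-  // If the softmax upstream is correct, each per-sample sum printed below
-  // should be ~1.0 up to floating point error.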
-  for(int i = 0; i < batch_dim; i++){
-    float sum = 0.0;
-    for(int j = 0; j < channels; j++){
-      sum += data[i * channels + j];
-    }
-    printf("output_sum = %f \n", sum);
-  }
-  
-}
-
-
-void testQuantization(){
-
-  printf("***** TensorQuantize ***** \n\n");
-  void* input = create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 4, 1, 1);
-
-  float* host_ptr = (float*) ((struct Tensor*) input)->host_data;
-  host_ptr[0] = -0.1;
-  host_ptr[1] = -25;
-  host_ptr[2] = 0.2;
-  host_ptr[3] = -0.4;
-  host_ptr[4] = 1.7;
-  host_ptr[5] = -2.9;
-  host_ptr[6] = 0.7;
-  host_ptr[7] = 0.99;
-
-  void* quantize_result = quantizeTensorPromise(input, -4, 6);
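-
-  // quantizeTensorPromise presumably maps values into the range [-4, 6]; e.g.
-  // the -25 written above should come back clamped to -4 (assumption - the
-  // exact quantization scheme is defined by the runtime).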
-  
-  printTensorValues(quantize_result);
-
-  void* error_out = addPromiseError(quantize_result, 7);
-
-  printTensorValues(error_out);
-
-}
-
-
-
-
-void testSampleFilter(){
-
-  printf("***** Tensor Sample Filter ***** \n\n");
-  Tensor* input = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 2, 2, 3, 3);
-  //fillTensorWithVal(input, 3);
-  fillWithOnesAndTwos(input);
-  
-  Tensor* input2 = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, 3, 2, 32, 32);
-  fillTensorWithVal(input2, 1);
-
-  /*  float* host_ptr = (float*) ((struct Tensor*) input)->host_data;
-  host_ptr[0] = -0.1;
-  host_ptr[1] = -25;
-  host_ptr[2] = 0.2;
-  host_ptr[3] = -0.4;
-  host_ptr[4] = 1.7;
-  host_ptr[5] = -2.9;
-  host_ptr[6] = 0.7;
-  host_ptr[7] = 0.99;
-  */
-
-  printTensorValues(input);
-
-  /*  printf("\n\n");
-
-  hpvm_request_tensor(input, DEVICE);
-    
-  sampleFilter(input, 2, 1);
-
-  hpvm_request_tensor(input, HOST);
-
-  printTensorValues(input);
-  */
-
-  void* exact_res = tensorConvolution(input2, input, 0, 0,
-				      1, 1, 1, 1);
-  printTensorValues(exact_res);
-  
-  void* res = tensorConvSampSim(input2, input, 0, 0, 1, 1, 1, 1, 4, 0);
-
-  //void* res = tensorConvApprox(input2, input, 0, 0, 1, 1, 1, 1, 1, 1, 4, 3);
- 
-  printTensorValues(res);
-  
-}
-
-
-
-int main(){
-
-  llvm_hpvm_initTensorRt(0);
-
-  startProfiling();
-  
-  //testTensorHgemm2();
-  //testTensorSgemm2();
-  //testTensorConv();
-  //testTensorError();
-
-  //testQuantization();
-
-  //testTensorConv();
-  //testTensorGroupedConv();
-
-  //testTensorBatchNorm();
-  
-  //testTensorGemm();
-  //testTensorGemmGPU();
-  //testTensorGemmBias();  
-  //testTensorConv2();
-  //testTensorConv3();
-  //testLRN();
-
-
-  testSampleFilter();
-  
-    
-  stopProfiling();
-
-  return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10.cc
deleted file mode 100644
index 60f09dd5d7b91eda7e23e23bc76dbae0f27beaf2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10.cc
+++ /dev/null
@@ -1,161 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 10000;
-  int batch_size = 1000;
-  int batch_count = test_input_size / batch_size;
-  float final_accuracy = 0.0;
-
-  for(int i = 0; i < batch_count; i++){
-
-    int start = i * batch_size;
-    int end = (i + 1) * batch_size;
-    
-    void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32); 
- 
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorRelu(var_17); 
-    void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorRelu(var_41); 
-    void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorAdd(var_54, dense_1_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-    float accuracy = computeAccuracy2(labels,batch_size,var_60); 
-    final_accuracy += accuracy;
-    
-    freeBatchMemory();
-  }
-
-  final_accuracy = final_accuracy / batch_count;
-  dumpFinalAccuracy(final_accuracy);
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
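
Every benchmark source deleted below follows the same batched-evaluation skeleton: slice [start, end) out of the test set, run the network, accumulate the per-batch accuracy, then average. A condensed sketch of just that control flow, with the read-input / forward-pass / computeAccuracy2 sequence stood in by a caller-supplied `runBatch` (a hypothetical callback, introduced only for illustration):

```cpp
#include <functional>

// Averages per-batch accuracies over a test set, mirroring the loop
// used by each benchmark. Inputs beyond the last full batch are
// dropped, exactly as the integer division above does.
float evaluateInBatches(int test_input_size, int batch_size,
                        const std::function<float(int, int)>& runBatch) {
  int batch_count = test_input_size / batch_size;
  float final_accuracy = 0.0f;
  for (int i = 0; i < batch_count; i++) {
    int start = i * batch_size;
    int end = (i + 1) * batch_size;
    final_accuracy += runBatch(start, end);  // accuracy for [start, end)
  }
  return final_accuracy / batch_count;
}
```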
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100.cc
deleted file mode 100644
index 63ca073e15d817a2f2898756353b958d00988e82..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100.cc
+++ /dev/null
@@ -1,160 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 10000; 
-  int batch_size = 2500; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorRelu(var_17); 
-    void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorRelu(var_41); 
-    void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorAdd(var_54, dense_1_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_60, 100); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
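
The only functional deltas from the CIFAR-10 version are the 100-way output layer and the extra class-count argument to computeAccuracy2. For reference, top-1 accuracy over softmax outputs reduces to an argmax-versus-label count; the sketch below is written from the call shape, so its exact match to computeAccuracy2 is an assumption:

```cpp
#include <cstddef>
#include <cstdint>

// Percent of rows whose highest-probability class equals the label.
float top1Accuracy(const uint8_t* labels, size_t batch,
                   const float* probs, size_t num_classes) {
  size_t correct = 0;
  for (size_t i = 0; i < batch; i++) {
    const float* row = probs + i * num_classes;
    size_t best = 0;
    for (size_t c = 1; c < num_classes; c++)
      if (row[c] > row[best]) best = c;
    if (best == labels[i]) correct++;
  }
  return 100.0f * correct / batch;
}
```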
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_5.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_5.cc
deleted file mode 100644
index 3ee273d70aea6d74cfa55f250e999b05506f9b21..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_5.cc
+++ /dev/null
@@ -1,167 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  //std::string input_path =  dir_prefix + std::string("vgg16_cifar100_calib.bin"); 
-  //std::string labels_path =  dir_prefix + std::string("vgg16_cifar100_train_labels.bin");
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin");
-  
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 2500;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size + offset; 
-    int end = (i + 1) * batch_size + offset; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorRelu(var_17); 
-    void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorRelu(var_41); 
-    void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorAdd(var_54, dense_1_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    //float accuracy = computeAccuracy2(labels, batch_size, var_60, 100);
-    float accuracy = computeTop5Accuracy(labels, batch_size, var_60, 100);
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
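
This variant swaps computeAccuracy2 for computeTop5Accuracy. Under the usual definition, a prediction counts as correct when the true label lands anywhere in the five highest-scoring classes; a sketch of that definition follows (assumed semantics, written from the call shape, and assuming num_classes >= 5):

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <numeric>
#include <vector>

// Percent of rows whose label is among the 5 largest scores.
float top5Accuracy(const uint8_t* labels, size_t batch,
                   const float* probs, size_t num_classes) {
  size_t correct = 0;
  for (size_t i = 0; i < batch; i++) {
    const float* row = probs + i * num_classes;
    std::vector<size_t> idx(num_classes);
    std::iota(idx.begin(), idx.end(), size_t{0});
    // Move the 5 top-scoring class indices to the front.
    std::partial_sort(idx.begin(), idx.begin() + 5, idx.end(),
                      [row](size_t a, size_t b) { return row[a] > row[b]; });
    if (std::find(idx.begin(), idx.begin() + 5, (size_t)labels[i])
        != idx.begin() + 5)
      correct++;
  }
  return 100.0f * correct / batch;
}
```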
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_top5_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_top5_tuner.cc
deleted file mode 100644
index 56e0e7016f16ce6548d9947e13fda96a931e436b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_top5_tuner.cc
+++ /dev/null
@@ -1,167 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  //std::string input_path =  dir_prefix + std::string("vgg16_cifar100_calib.bin"); 
-  //std::string labels_path =  dir_prefix + std::string("vgg16_cifar100_train_labels.bin");
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin");
-  
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 4000; 
-  int batch_size = 4000;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size + offset; 
-    int end = (i + 1) * batch_size + offset; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorRelu(var_17); 
-    void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorRelu(var_41); 
-    void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorAdd(var_54, dense_1_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    //float accuracy = computeAccuracy2(labels, batch_size, var_60, 100);
-    float accuracy = computeTop5Accuracy(labels, batch_size, var_60, 100);
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
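
The tuner builds all score a slice that starts at offset = 5000 rather than at 0. The commented-out *_calib.bin paths suggest the first 5000 images are reserved for calibration or tuning, so evaluation never overlaps them; that interpretation is inferred from the names, not documented here. The index arithmetic reduces to:

```cpp
// Batch i of the evaluation slice covers
// [offset + i*batch_size, offset + (i+1)*batch_size),
// so nothing below `offset` is ever scored.
struct BatchWindow { int start; int end; };

BatchWindow evalWindow(int i, int batch_size, int offset) {
  return { i * batch_size + offset, (i + 1) * batch_size + offset };
}
```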
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_tuner.cc
deleted file mode 100644
index 18e419553641160d59930a72695ec0a191c06d74..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar100_tuner.cc
+++ /dev/null
@@ -1,166 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(0); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/"); 
-  //std::string input_path =  dir_prefix + std::string("vgg16_cifar100_calib.bin"); 
-  //std::string labels_path =  dir_prefix + std::string("vgg16_cifar100_train_labels.bin");
-
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin");
-  
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-  startMemTracking(); 
-
-  int test_input_size = 5000; 
-  int batch_size = 5000;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size + offset; 
-    int end = (i + 1) * batch_size + offset; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-    void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-    void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-    void* var_6 = tensorRelu(var_5); 
-    void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-    void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-    void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-    void* var_10 = tensorRelu(var_9); 
-    void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-    void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-    void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-    void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-    void* var_18 = tensorRelu(var_17); 
-    void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-    void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-    void* var_22 = tensorRelu(var_21); 
-    void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-    void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-    void* var_26 = tensorRelu(var_25); 
-    void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-    void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-    void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-    void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-    void* var_34 = tensorRelu(var_33); 
-    void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-    void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-    void* var_38 = tensorRelu(var_37); 
-    void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-    void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-    void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-    void* var_42 = tensorRelu(var_41); 
-    void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-    void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-    void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-    void* var_50 = tensorRelu(var_49); 
-    void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-    void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-    void* var_55 = tensorAdd(var_54, dense_1_b); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-    void* var_59 = tensorAdd(var_58, dense_2_b); 
-    void* var_60 = tensorSoftmax(var_59); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_60, 100); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-}
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10_tuner.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10_tuner.cc
deleted file mode 100644
index 552001ba7af481845f75cd95e3249bc7ba7d0a97..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/src/vgg16_cifar10_tuner.cc
+++ /dev/null
@@ -1,180 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-
-int main(int argc, char* argv[]){ 
-
-  int total_runs = 1;
-  if (argc > 1){
-    total_runs = atoi(argv[1]);
-  }
-  
-
-  llvm_hpvm_initTensorRt(1); 
-
-  std::string dir_prefix = std::string("../model_params/vgg16_cifar10_2/"); 
-  //std::string input_path =  dir_prefix + std::string("vgg16_cifar_calib.bin"); 
-  //std::string labels_path =  dir_prefix + std::string("vgg16_train_labels.bin");
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin");
- 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-  std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-  void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-  std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-  void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-  std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-  void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-  std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-  void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-  std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-  void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-  void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-  std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-  void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-  std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-  void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-  void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-  void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-  void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-  void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-  std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-  void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-  std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-  void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-  std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-  void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-  startMemTracking();
-
-  int test_input_size = 500;
-  int batch_size = 500;
-  int offset = 5000;
-  
-  int batch_count = test_input_size / batch_size;
-
-
-  for(int j = 0; j < total_runs; j++){
-    
-    float final_accuracy = 0.0;
-    for(int i = 0; i < batch_count; i++){
-
-      int start = i * batch_size + offset;
-      int end = (i + 1) * batch_size + offset;
-    
-      void* input = readInputBatch(input_path.c_str(), 0,start,end,3,32,32); 
- 
-      void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-      void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-      void* var_2 = tensorRelu(var_1); 
-      void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-      void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-      void* var_6 = tensorRelu(var_5); 
-      void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-      void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-      void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-      void* var_10 = tensorRelu(var_9); 
-      void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-      void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-      void* var_14 = tensorRelu(var_13); 
-      void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-      void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-      void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-      void* var_18 = tensorRelu(var_17); 
-      void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-      void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-      void* var_22 = tensorRelu(var_21); 
-      void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-      void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-      void* var_26 = tensorRelu(var_25); 
-      void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-      void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-      void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-      void* var_30 = tensorRelu(var_29); 
-      void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-      void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-      void* var_34 = tensorRelu(var_33); 
-      void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-      void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-      void* var_38 = tensorRelu(var_37); 
-      void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-      void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-      void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-      void* var_42 = tensorRelu(var_41); 
-      void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-      void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-      void* var_46 = tensorRelu(var_45); 
-      void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-      void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-      void* var_50 = tensorRelu(var_49); 
-      void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-      void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-      void* var_55 = tensorAdd(var_54, dense_1_b); 
-      void* var_56 = tensorRelu(var_55); 
-      void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-      void* var_59 = tensorAdd(var_58, dense_2_b); 
-      void* var_60 = tensorSoftmax(var_59); 
-
-      uint8_t* labels = readLabelsBatch(labels_path.c_str(), start, end); 
-
-      float accuracy = computeAccuracy2(labels,batch_size,var_60); 
-      final_accuracy += accuracy;
-    
-      freeBatchMemory();
-    }
-
-    final_accuracy = final_accuracy / batch_count;
-    dumpFinalAccuracy(final_accuracy);
-  }
-
-  dumpExecutionAccuracies();
-  
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
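
Unlike the other benchmarks, this tuner repeats the full evaluation total_runs times, dumping one accuracy per run and then the whole series via dumpExecutionAccuracies. When post-processing that series, the usual summary is a mean and sample standard deviation; a small sketch over an in-memory vector of per-run accuracies (the dump's on-disk format is not assumed):

```cpp
#include <cmath>
#include <utility>
#include <vector>

// Mean and sample standard deviation of per-run accuracies.
std::pair<double, double> summarizeRuns(const std::vector<float>& accs) {
  if (accs.empty()) return {0.0, 0.0};
  double mean = 0.0;
  for (float a : accs) mean += a;
  mean /= accs.size();
  double var = 0.0;
  for (float a : accs) var += (a - mean) * (a - mean);
  var /= (accs.size() > 1 ? accs.size() - 1 : 1);
  return {mean, std::sqrt(var)};
}
```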
diff --git a/llvm/projects/hpvm-tensor-rt/dnn_sources/tuning_src/fc_network_acc.cc b/llvm/projects/hpvm-tensor-rt/dnn_sources/tuning_src/fc_network_acc.cc
deleted file mode 100644
index 7ab357e4ac6e8b6550a71b14ce73c79e20879cf3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/dnn_sources/tuning_src/fc_network_acc.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-
-
-#include <stdio.h>
-#include <cstdlib>
-#include <stdlib.h>
-#include <unistd.h>
-
-#include "../../tensor_runtime/include/tensor_runtime.h"
-#include "../include/utils.h"
-#include "../include/types.h"
-#include "../include/op_overheads.h"
-
-
-
-
-/* NOTE: Reference Architecture to use for profiling */
-void testFCNetworkArch(int op1_acc, int op2_acc, int op3_acc,
-		       int op4_acc, int op5_acc, int op6_acc, int op7_acc){
-
-  printf("********* Fully Connected DNN-1 - Accuracy Tuned ********* \n");
-  // FIXIT: extend this to mini-batched execution; currently the whole test set (test_batch_size = 10000) runs as a single batch
-
-  int test_batch_size = 10000; 
-  void* input = readTrainedWeights("../model_params/FC_network2/mnist_float_input.bin",
-  					    float_type, test_batch_size, 1, 28, 28);  
-
-  void* fc1_weights = readTrainedWeights("../model_params/FC_network2/fc1.bin",
-						  float_type, 1, 1, 784, 128);  
-  void* fc1_bias = readTrainedWeights("../model_params/FC_network2/fc1_bias.bin",
-					       float_type, 1, 128, 1, 1);  
-  void* fc2_weights = readTrainedWeights("../model_params/FC_network2/fc2.bin",
-						  float_type, 1, 1, 128, 10);  
-  void* fc2_bias = readTrainedWeights("../model_params/FC_network2/fc2_bias.bin",
-					       float_type, 1, 10, 1, 1);  
-
-  // Start execution profiling Tensor ops
-  startProfiling();
-  
-  // Layer-1
-  void* fc1out = tensorGemmGPU(input, fc1_weights);  
-  void* error_norms = tensorAddError(fc1out, op1_acc);
-  add_norms(error_norms);
-  add_gemm_overheads(input, fc1_weights, op1_acc);
-  
-  void* fc1_bias_out = tensorAdd(fc1out, fc1_bias);
-  error_norms = tensorAddError(fc1_bias_out, op2_acc);
-  add_norms(error_norms);
-  add_bias_overheads(fc1_bias_out, op2_acc);
-
-  void* fc1_relu = tensorRelu(fc1_bias_out);
-  error_norms = tensorAddError(fc1_relu, op3_acc);
-  add_norms(error_norms);
-  add_relu_overheads(fc1_relu, op3_acc);
- 
-  // Layer-2
-  void* fc2out = tensorGemmGPU(fc1_relu, fc2_weights);  
-  error_norms = tensorAddError(fc2out, op4_acc);
-  add_norms(error_norms);
-  add_gemm_overheads(fc1_relu, fc2_weights, op4_acc);
-  
-  void* fc2_bias_out = tensorAdd(fc2out, fc2_bias);
-  error_norms = tensorAddError(fc2_bias_out, op5_acc);
-  add_norms(error_norms);
-  add_bias_overheads(fc2_bias_out, op5_acc);
-
-  void* fc2_relu = tensorRelu(fc2_bias_out);
-  error_norms = tensorAddError(fc2_relu, op6_acc);
-  add_norms(error_norms);
-  add_relu_overheads(fc2_relu, op6_acc);
-  
-  void* result = tensorSoftmax(fc2_relu);
-  error_norms = tensorAddError(result, op7_acc);
-  add_norms(error_norms);
-  add_bias_overheads(result, op7_acc);
-  
-  stopProfiling();
-  
-  computeAccuracy("../model_params/lenet_params/datasets/t10k-labels-idx1-ubyte", test_batch_size, result);
-
-  dump_result("accuracy_summary");
-}
-
-
-
-int main(int argc, char* argv[]){
-
-  if(argc < 8){
-    printf("Must provide 7 knobs for accuracy tuning \n");
-    abort();
-  }
-  
-  // This initializes the runtime - must be called before anything
-  llvm_hpvm_initTensorRt(0);
-
-  int op1_acc = atoi(argv[1]);
-  int op2_acc = atoi(argv[2]);
-  int op3_acc = atoi(argv[3]);
-  int op4_acc = atoi(argv[4]);
-  int op5_acc = atoi(argv[5]);
-  int op6_acc = atoi(argv[6]);
-  int op7_acc = atoi(argv[7]);
-  
-  testFCNetworkArch(op1_acc, op2_acc, op3_acc,
-		    op4_acc, op5_acc, op6_acc, op7_acc);
-
-  return 0;
-}
-
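fc_network_acc.cc above drives a two-layer MNIST MLP through seven per-op accuracy knobs: each tensor op is followed by tensorAddError(out, knob) to perturb its output, the resulting error norms are accumulated with add_norms, and an op-specific cost estimate is charged via add_gemm_overheads / add_bias_overheads / add_relu_overheads. (The final softmax reuses add_bias_overheads, apparently for lack of a dedicated softmax hook.) A sketch of the wrapping pattern, assuming the same runtime and op_overheads headers; applyKnob itself is illustrative, not a runtime API:

```cpp
// Perturb an op's output according to its knob and record the error norms.
void *applyKnob(void *out, int knob) {
  void *error_norms = tensorAddError(out, knob); // inject knob-level error
  add_norms(error_norms);                        // accumulate error statistics
  return out;
}

// Usage, mirroring Layer-1 of the deleted source:
//   void *fc1out = applyKnob(tensorGemmGPU(input, fc1_weights), op1_acc);
//   add_gemm_overheads(input, fc1_weights, op1_acc);
```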
diff --git a/llvm/projects/hpvm-tensor-rt/lib/tensor_runtime.ll b/llvm/projects/hpvm-tensor-rt/lib/tensor_runtime.ll
deleted file mode 100644
index 89c8da90f8ab740062bd84cdd365baa67311a7a4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/lib/tensor_runtime.ll
+++ /dev/null
@@ -1,201 +0,0 @@
-; ModuleID = '/home/hsharif3/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt//lib/tensor_runtime.bc'
-source_filename = "/home/hsharif3/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt//tensor_runtime/include/tensor_signatures.cc"
-target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
-target triple = "x86_64-unknown-linux-gnu"
-
-; Function Attrs: noinline nounwind uwtable
-define void @_Z13dummyFunctionv() #0 {
-entry:
-  %initRT = alloca i8*, align 8
-  %cleanRT = alloca i8*, align 8
-  %initApproxRT = alloca i8*, align 8
-  %cleanApproxRT = alloca i8*, align 8
-  %initRTController = alloca i8*, align 8
-  %cleanRTController = alloca i8*, align 8
-  %request_tensorPtr = alloca i8*, align 8
-  %startProf = alloca i8*, align 8
-  %stopProf = alloca i8*, align 8
-  %create2Dptr = alloca i8*, align 8
-  %create3Dptr = alloca i8*, align 8
-  %create4Dptr = alloca i8*, align 8
-  %initTensorPtr = alloca i8*, align 8
-  %tensorSplitPtr = alloca i8*, align 8
-  %tensorConcatPtr = alloca i8*, align 8
-  %tensorConvPtr = alloca i8*, align 8
-  %tensorHConvPtr = alloca i8*, align 8
-  %tensorPoolPtr = alloca i8*, align 8
-  %tensorHalfPoolPtr = alloca i8*, align 8
-  %tensorLRNPtr = alloca i8*, align 8
-  %tensorGemmPr = alloca i8*, align 8
-  %tensorGemmCPUPtr = alloca i8*, align 8
-  %tensorGemmGPUPtr = alloca i8*, align 8
-  %tensorHgemmPtr = alloca i8*, align 8
-  %tensorGemmBiasPtr = alloca i8*, align 8
-  %tensorAddPtr = alloca i8*, align 8
-  %tensorHalfAddPtr = alloca i8*, align 8
-  %tensorReluPtr = alloca i8*, align 8
-  %tensorRelu2Ptr = alloca i8*, align 8
-  %tensorHalfRelu2Ptr = alloca i8*, align 8
-  %tensorTanhPtr = alloca i8*, align 8
-  %tensorHalfTanhPtr = alloca i8*, align 8
-  %tensorSoftmaxPtr = alloca i8*, align 8
-  %tensorBatchNormPtr = alloca i8*, align 8
-  %tensorAddErrorPtr = alloca i8*, align 8
-  %ConvLayer = alloca i8*, align 8
-  %FCLayer = alloca i8*, align 8
-  %ConvLayer2 = alloca i8*, align 8
-  %ConvLayer3 = alloca i8*, align 8
-  %FCLayer2 = alloca i8*, align 8
-  %AddWrapper = alloca i8*, align 8
-  %ReluWrapper = alloca i8*, align 8
-  %TanhWrapper = alloca i8*, align 8
-  %BatchNormWrapper = alloca i8*, align 8
-  %PoolingWrapper = alloca i8*, align 8
-  %softmaxWrapper = alloca i8*, align 8
-  store i8* bitcast (void (i32)* @llvm_hpvm_initTensorRt to i8*), i8** %initRT, align 8
-  store i8* bitcast (void ()* @llvm_hpvm_cleanupTensorRt to i8*), i8** %cleanRT, align 8
-  store i8* bitcast (void (i32)* @llvm_hpvm_initApproxhpvmRt to i8*), i8** %initApproxRT, align 8
-  store i8* bitcast (void ()* @llvm_hpvm_cleanupApproxhpvmRt to i8*), i8** %cleanApproxRT, align 8
-  store i8* bitcast (void (i8*, i8*)* @llvm_hpvm_initializeRuntimeController to i8*), i8** %initRTController, align 8
-  store i8* bitcast (void ()* @llvm_hpvm_clearRuntimeController to i8*), i8** %cleanRTController, align 8
-  store i8* bitcast (void (i8*, i32)* @hpvm_request_tensor to i8*), i8** %request_tensorPtr, align 8
-  store i8* bitcast (void ()* @startProfiling to i8*), i8** %startProf, align 8
-  store i8* bitcast (void ()* @stopProfiling to i8*), i8** %stopProf, align 8
-  store i8* bitcast (i8* (i32, i64, i64)* @create2DTensor to i8*), i8** %create2Dptr, align 8
-  store i8* bitcast (i8* (i32, i64, i64, i64)* @create3DTensor to i8*), i8** %create3Dptr, align 8
-  store i8* bitcast (i8* (i32, i32, i64, i64, i64, i64)* @create4DTensor to i8*), i8** %create4Dptr, align 8
-  store i8* bitcast (void (i8*, i8*, i64)* @initTensorData to i8*), i8** %initTensorPtr, align 8
-  store i8* bitcast (i8** (i8*, i32, i32)* @tensorSplit to i8*), i8** %tensorSplitPtr, align 8
-  store i8* bitcast (i8* (i8**, i32, i32)* @tensorConcat to i8*), i8** %tensorConcatPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*, i32, i32, i32, i32, i32, i32)* @tensorConvolution to i8*), i8** %tensorConvPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*, i32, i32, i32, i32, i32, i32)* @tensorHalfConvolution to i8*), i8** %tensorHConvPtr, align 8
-  store i8* bitcast (i8* (i8*, i32, i32, i32, i32, i32, i32, i32)* @tensorPooling to i8*), i8** %tensorPoolPtr, align 8
-  store i8* bitcast (i8* (i8*, i32, i32, i32, i32, i32, i32, i32)* @tensorHalfPooling to i8*), i8** %tensorHalfPoolPtr, align 8
-  store i8* bitcast (i8* (i8*, i32, double, double, double)* @tensorLRN to i8*), i8** %tensorLRNPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorGemm to i8*), i8** %tensorGemmPr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorGemmCPU to i8*), i8** %tensorGemmCPUPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorGemmGPU to i8*), i8** %tensorGemmGPUPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorHalfGemm to i8*), i8** %tensorHgemmPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorGemmBias to i8*), i8** %tensorGemmBiasPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorAdd to i8*), i8** %tensorAddPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @tensorHalfAdd to i8*), i8** %tensorHalfAddPtr, align 8
-  store i8* bitcast (i8* (i8*)* @tensorRelu to i8*), i8** %tensorReluPtr, align 8
-  store i8* bitcast (i8* (i8*, float, float)* @tensorRelu2 to i8*), i8** %tensorRelu2Ptr, align 8
-  store i8* bitcast (i8* (i8*, float, float)* @tensorHalfRelu2 to i8*), i8** %tensorHalfRelu2Ptr, align 8
-  store i8* bitcast (i8* (i8*)* @tensorTanh to i8*), i8** %tensorTanhPtr, align 8
-  store i8* bitcast (i8* (i8*)* @tensorHalfTanh to i8*), i8** %tensorHalfTanhPtr, align 8
-  store i8* bitcast (i8* (i8*)* @tensorSoftmax to i8*), i8** %tensorSoftmaxPtr, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*, i8*, i8*, double)* @tensorBatchNorm to i8*), i8** %tensorBatchNormPtr, align 8
-  store i8* bitcast (i8* (i8*, i32)* @tensorAddError to i8*), i8** %tensorAddErrorPtr, align 8
-  store i8* bitcast (i8* (i8*, float, float, i8*, float, float, i8*, float, float, i32, i32, i32, i32, i32, i32, i32, float, float, i32)* @ConvLayer_PROMISE to i8*), i8** %ConvLayer, align 8
-  store i8* bitcast (i8* (i8*, float, float, i8*, float, float, i8*, float, float, i32, float, float, i32)* @FCLayer_PROMISE to i8*), i8** %FCLayer, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*, i8*, i32, i32, i32, i32, i32, i32, i32, float, float)* @wrapper_ConvLayer to i8*), i8** %ConvLayer2, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*, i32, i32, i32, i32, i32, i32)* @wrapper_tensorGroupConvolution to i8*), i8** %ConvLayer3, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*, i8*, i32, float, float)* @wrapper_FCLayer to i8*), i8** %FCLayer2, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*)* @wrapper_tensorAdd to i8*), i8** %AddWrapper, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @wrapper_tensorRelu to i8*), i8** %ReluWrapper, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @wrapper_tensorTanh to i8*), i8** %TanhWrapper, align 8
-  store i8* bitcast (i8* (i8*, i8*, i8*, i8*, i8*, i8*, double)* @wrapper_tensorBatchNorm to i8*), i8** %BatchNormWrapper, align 8
-  store i8* bitcast (i8* (i8*, i8*, i32, i32, i32, i32, i32, i32, i32)* @wrapper_tensorPooling to i8*), i8** %PoolingWrapper, align 8
-  store i8* bitcast (i8* (i8*, i8*)* @wrapper_tensorSoftmax to i8*), i8** %softmaxWrapper, align 8
-  ret void
-}
-
-declare void @llvm_hpvm_initTensorRt(i32) #1
-
-declare void @llvm_hpvm_cleanupTensorRt() #1
-
-declare void @llvm_hpvm_initApproxhpvmRt(i32) #1
-
-declare void @llvm_hpvm_cleanupApproxhpvmRt() #1
-
-declare void @llvm_hpvm_initializeRuntimeController(i8*, i8*) #1
-
-declare void @llvm_hpvm_clearRuntimeController() #1
-
-declare void @hpvm_request_tensor(i8*, i32) #1
-
-declare void @startProfiling() #1
-
-declare void @stopProfiling() #1
-
-declare i8* @create2DTensor(i32, i64, i64) #1
-
-declare i8* @create3DTensor(i32, i64, i64, i64) #1
-
-declare i8* @create4DTensor(i32, i32, i64, i64, i64, i64) #1
-
-declare void @initTensorData(i8*, i8*, i64) #1
-
-declare i8** @tensorSplit(i8*, i32, i32) #1
-
-declare i8* @tensorConcat(i8**, i32, i32) #1
-
-declare i8* @tensorConvolution(i8*, i8*, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @tensorHalfConvolution(i8*, i8*, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @tensorPooling(i8*, i32, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @tensorHalfPooling(i8*, i32, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @tensorLRN(i8*, i32, double, double, double) #1
-
-declare i8* @tensorGemm(i8*, i8*) #1
-
-declare i8* @tensorGemmCPU(i8*, i8*) #1
-
-declare i8* @tensorGemmGPU(i8*, i8*) #1
-
-declare i8* @tensorHalfGemm(i8*, i8*) #1
-
-declare i8* @tensorGemmBias(i8*, i8*) #1
-
-declare i8* @tensorAdd(i8*, i8*) #1
-
-declare i8* @tensorHalfAdd(i8*, i8*) #1
-
-declare i8* @tensorRelu(i8*) #1
-
-declare i8* @tensorRelu2(i8*, float, float) #1
-
-declare i8* @tensorHalfRelu2(i8*, float, float) #1
-
-declare i8* @tensorTanh(i8*) #1
-
-declare i8* @tensorHalfTanh(i8*) #1
-
-declare i8* @tensorSoftmax(i8*) #1
-
-declare i8* @tensorBatchNorm(i8*, i8*, i8*, i8*, i8*, double) #1
-
-declare i8* @tensorAddError(i8*, i32) #1
-
-declare i8* @ConvLayer_PROMISE(i8*, float, float, i8*, float, float, i8*, float, float, i32, i32, i32, i32, i32, i32, i32, float, float, i32) #1
-
-declare i8* @FCLayer_PROMISE(i8*, float, float, i8*, float, float, i8*, float, float, i32, float, float, i32) #1
-
-declare i8* @wrapper_ConvLayer(i8*, i8*, i8*, i8*, i32, i32, i32, i32, i32, i32, i32, float, float) #1
-
-declare i8* @wrapper_tensorGroupConvolution(i8*, i8*, i8*, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @wrapper_FCLayer(i8*, i8*, i8*, i8*, i32, float, float) #1
-
-declare i8* @wrapper_tensorAdd(i8*, i8*, i8*) #1
-
-declare i8* @wrapper_tensorRelu(i8*, i8*) #1
-
-declare i8* @wrapper_tensorTanh(i8*, i8*) #1
-
-declare i8* @wrapper_tensorBatchNorm(i8*, i8*, i8*, i8*, i8*, i8*, double) #1
-
-declare i8* @wrapper_tensorPooling(i8*, i8*, i32, i32, i32, i32, i32, i32, i32) #1
-
-declare i8* @wrapper_tensorSoftmax(i8*, i8*) #1
-
-attributes #0 = { noinline nounwind uwtable "correctly-rounded-divide-sqrt-fp-math"="false" "disable-tail-calls"="false" "less-precise-fpmad"="false" "no-frame-pointer-elim"="true" "no-frame-pointer-elim-non-leaf" "no-infs-fp-math"="false" "no-jump-tables"="false" "no-nans-fp-math"="false" "no-signed-zeros-fp-math"="false" "no-trapping-math"="false" "stack-protector-buffer-size"="8" "target-cpu"="x86-64" "target-features"="+fxsr,+mmx,+sse,+sse2,+x87" "unsafe-fp-math"="false" "use-soft-float"="false" }
-attributes #1 = { "correctly-rounded-divide-sqrt-fp-math"="false" "disable-tail-calls"="false" "less-precise-fpmad"="false" "no-frame-pointer-elim"="true" "no-frame-pointer-elim-non-leaf" "no-infs-fp-math"="false" "no-nans-fp-math"="false" "no-signed-zeros-fp-math"="false" "no-trapping-math"="false" "stack-protector-buffer-size"="8" "target-cpu"="x86-64" "target-features"="+fxsr,+mmx,+sse,+sse2,+x87" "unsafe-fp-math"="false" "use-soft-float"="false" }
-
-!llvm.ident = !{!0}
-
-!0 = !{!"clang version 4.0.1 "}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1.bin
deleted file mode 100644
index 17b5b1e6bfbccd08a42fdf7ee241a7742e764ffb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1_bias.bin
deleted file mode 100644
index 5187944d335d316a3d5a4015d7da69e425878347..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2.bin
deleted file mode 100644
index fe15f7c890cdc9c6e1afd83dc50b8c1308a55dcc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2_bias.bin
deleted file mode 100644
index 3c7278911331c715598268586202b1d95aa5ef58..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3.bin
deleted file mode 100644
index a11cdfff7f2e6f1f70fc37e8c0da9b3997116f27..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3_bias.bin
deleted file mode 100644
index 493f78d98eec57da2bb3004079a64f0584ea60d9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4.bin
deleted file mode 100644
index 4f940102eb8a17051c44e8fe12a6b7730a61c15d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4_bias.bin
deleted file mode 100644
index fd3305efb194f19475cb0a260f845efc8bd986e7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/FC_network1/fc4_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
-Ǜ±¿žÓ?Ü‚?Sˆ¾6;bZö>c÷”¿¡™Ê<`–i?
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1.bin
deleted file mode 100644
index d24151936ec9c89e260439d55edf42d2dc55723f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1_bias.bin
deleted file mode 100644
index 9cca044999ee589bbfdfada84db040751559cd26..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2.bin
deleted file mode 100644
index 7fc42c720505d595c80dc426cc739dcdc5e5c7e2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2_bias.bin
deleted file mode 100644
index eb2e7856b366465f0574ad68d71c88432d021b27..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/fc2_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
-°F¾Êgw>I‹q<?N½»ƒ¾Ò…¹>	„m½É > Ý†¾ÝH/½
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/mnist_float_input.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/mnist_float_input.bin
deleted file mode 100644
index 779dcf7f6ad72f3e22d5c96148d2f0f7e11e39b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network2/mnist_float_input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1.bin
deleted file mode 100644
index 3a2acb0bdfd5b2073eaad1ec51b99faf5a60ae07..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1_bias.bin
deleted file mode 100644
index 6b20a053997e30f768995fbf4f27ab6b04f8403f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2.bin
deleted file mode 100644
index dd955fc10e8727ed4bf41100f7b74d2026d0cad6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2_bias.bin
deleted file mode 100644
index e225b78da4cf02f9500020edbd2a6f68a03ab7aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3.bin
deleted file mode 100644
index d3b5c08ee1330f8f33d24f12aad8c78437a54a28..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3_bias.bin
deleted file mode 100644
index e3a7f0b8f93603a474ac3ef23a48b550de1e2327..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4.bin
deleted file mode 100644
index f92c14bde53bd000f8d3f7993373fe6797dd4921..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4_bias.bin
deleted file mode 100644
index 6459c295b4dc106194c17ce55c363caded6b8bc8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/FC_network3/fc4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1.bin
deleted file mode 100644
index 2f19c0f9435e2ce6c8f1e9b502faf633d178da0c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1_bias.bin
deleted file mode 100644
index bf41661c9e6d173175f226fcabfe2233b579809b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2.bin
deleted file mode 100644
index 92a39675e8da83e41c612695ec60877f7f40d925..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2_bias.bin
deleted file mode 100644
index 8a83ce22c0ba6aa30837de0f86d7a18e0f679792..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv2_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
- !¶¾Â®€¾)`…¾àDоd¾([¾A6…¾4€¾òû”¾^Ød¾å㤾ªf¾t¾)\¾ñdv¾6EH¾zíR¾!†¾a¾­;¾4°{¾óÉU¾ïbH¾ÏJ¾»ÿ<¾˜2‘¾Ü`¾{Hw¾”Wd¾Øö0¾ÏÃ`¾ž‡l¾
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3.bin
deleted file mode 100644
index bb59b47987bd796b22faa02b6bf9508c166ef2e0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3_bias.bin
deleted file mode 100644
index e1f7a16ed65bdd9e40a4c96b6310e2da605df9c8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4.bin
deleted file mode 100644
index 1870b8f4982ff75d21dd978177aeedad82a74554..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4_bias.bin
deleted file mode 100644
index 3c5508709a01f293aed21a1ae94fdcf171e706f5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5.bin
deleted file mode 100644
index 89c50f51bb3f9e9cbd3e9082d24be1bd460cdb9b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5_bias.bin
deleted file mode 100644
index 6d43fb53fe93405422aa09da68b22e41eaec615b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv5_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6.bin
deleted file mode 100644
index 5220270e4aae3adf3c7b6c24a1480f595b036fc8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6_bias.bin
deleted file mode 100644
index 5425f434ba456b735482110d641067a645ee0843..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/conv6_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1.bin
deleted file mode 100644
index 2dc81917a1267633a1c7ffa37b877126b1272b2c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1_bias.bin
deleted file mode 100644
index 22a3fb3a7a21e4966d7bdb701338f9ad241c4384..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/fc1_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
-g¡b½âdÖ>Šé?7Å>*8{¾`V9=S–	¿ŽC½u´ë½¾_¾
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/norm_cifar_input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/norm_cifar_input.bin
deleted file mode 100644
index 8b244c89d3503750d6fe23e82cda5493ef3a259e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/norm_cifar_input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/test_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/test_labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10/test_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/approxhpvm_src.cc
deleted file mode 100644
index 0b241dbc4efd53909da1f0f5046c9794d23314b1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/approxhpvm_src.cc
+++ /dev/null
@@ -1,500 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* conv2d_1_b, size_t conv2d_1_b_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* conv2d_2_b, size_t conv2d_2_b_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* conv2d_3_b, size_t conv2d_3_b_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* conv2d_4_b, size_t conv2d_4_b_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* conv2d_5_b, size_t conv2d_5_b_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* conv2d_6_b, size_t conv2d_6_b_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(15, input, conv2d_1_w, conv2d_1_b, conv2d_2_w, conv2d_2_b, conv2d_3_w, conv2d_3_b, conv2d_4_w, conv2d_4_b, conv2d_5_w, conv2d_5_b, conv2d_6_w, conv2d_6_b, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 6, 2, 0); 
-  __visc__bindIn(var_3, 7, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 8, 2, 0); 
-  __visc__bindIn(var_4, 9, 3, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 10, 2, 0); 
-  __visc__bindIn(var_7, 11, 3, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-  __visc__bindIn(var_8, 12, 2, 0); 
-  __visc__bindIn(var_8, 13, 3, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 14, 2, 0); 
-  __visc__bindIn(var_10, 15, 3, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-  __visc__bindIn(var_11, 16, 2, 0); 
-  __visc__bindIn(var_11, 17, 3, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-  __visc__bindIn(var_14, 18, 2, 0); 
-  __visc__bindIn(var_14, 19, 3, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 20, 2, 0); 
-  __visc__bindIn(var_15, 21, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-  __visc__bindIn(var_17, 22, 2, 0); 
-  __visc__bindIn(var_17, 23, 3, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 24, 2, 0); 
-  __visc__bindIn(var_18, 25, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 26, 2, 0); 
-  __visc__bindIn(var_21, 27, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-  __visc__bindIn(var_22, 28, 2, 0); 
-  __visc__bindIn(var_22, 29, 3, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_23, 0, 0, 0); 
-  __visc__bindOut(var_23, 1, 1, 0); 
-
-}
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* conv2d_1_b; 
-  size_t conv2d_1_b_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* conv2d_2_b; 
-  size_t conv2d_2_b_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* conv2d_3_b; 
-  size_t conv2d_3_b_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* conv2d_4_b; 
-  size_t conv2d_4_b_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* conv2d_5_b; 
-  size_t conv2d_5_b_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* conv2d_6_b; 
-  size_t conv2d_6_b_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-}
-RootIn;
-
-int main(){ 
-
-std::string dir_prefix = std::string("alexnet2_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->conv2d_1_b = conv2d_1_b; 
-args->conv2d_1_b_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->conv2d_2_b = conv2d_2_b; 
-args->conv2d_2_b_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->conv2d_3_b = conv2d_3_b; 
-args->conv2d_3_b_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->conv2d_4_b = conv2d_4_b; 
-args->conv2d_4_b_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->conv2d_5_b = conv2d_5_b; 
-args->conv2d_5_b_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->conv2d_6_b = conv2d_6_b; 
-args->conv2d_6_b_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
-computeAccuracy2(labels, 10000, result);
-return 0; 
-
-} 
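approxhpvm_src.cc above expresses the network as an HPVM dataflow graph: each var_N_node leaf wraps a single tensor intrinsic and returns a (tensor, size) pair; root instantiates leaves with __visc__createNodeND, routes its own arguments into node inputs with __visc__bindIn (root-argument index to node-input index), chains each node's two outputs to its successor's inputs with __visc__edge, and exports the last node's outputs with __visc__bindOut. main() then packs all tensors into the packed RootIn struct, launches the graph with __visc__launch, and blocks on __visc__wait. A reduced sketch of the same wiring for one convolution node, with intrinsic signatures taken from their uses above:

```cpp
// Leaf node: one convolution, targeted at cuDNN.
void conv_node(void *t1, size_t bytes_t1, void *t2, size_t bytes_t2) {
  __visc__hint(visc::CUDNN_TARGET);
  __visc__attributes(2, t1, t2, 0);
  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); // pads 1,1; strides 1,1
  __visc__return(2, r, (size_t)0);
}

// Root: bind (input, weights) into the node and expose its outputs.
void tiny_root(void *input, size_t input_bytes, void *w, size_t w_bytes) {
  __visc__hint(visc::CPU_TARGET);
  __visc__attributes(2, input, w, 0);

  void *n0 = __visc__createNodeND(0, conv_node);
  __visc__bindIn(n0, 0, 0, 0);  // root arg 0 (input tensor) -> node input 0
  __visc__bindIn(n0, 1, 1, 0);  // root arg 1 (input bytes)  -> node input 1
  __visc__bindIn(n0, 2, 2, 0);  // root arg 2 (weights)      -> node input 2
  __visc__bindIn(n0, 3, 3, 0);  // root arg 3 (weight bytes) -> node input 3
  __visc__bindOut(n0, 0, 0, 0); // node output 0 -> root output 0
  __visc__bindOut(n0, 1, 1, 0); // node output 1 -> root output 1
}
```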
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_b.bin
deleted file mode 100644
index bf41661c9e6d173175f226fcabfe2233b579809b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_w.bin
deleted file mode 100644
index 2f19c0f9435e2ce6c8f1e9b502faf633d178da0c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_b.bin
deleted file mode 100644
index 8a83ce22c0ba6aa30837de0f86d7a18e0f679792..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_b.bin
+++ /dev/null
@@ -1 +0,0 @@
- !¶¾Â®€¾)`…¾àDоd¾([¾A6…¾4€¾òû”¾^Ød¾å㤾ªf¾t¾)\¾ñdv¾6EH¾zíR¾!†¾a¾­;¾4°{¾óÉU¾ïbH¾ÏJ¾»ÿ<¾˜2‘¾Ü`¾{Hw¾”Wd¾Øö0¾ÏÃ`¾ž‡l¾
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_w.bin
deleted file mode 100644
index 92a39675e8da83e41c612695ec60877f7f40d925..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_b.bin
deleted file mode 100644
index e1f7a16ed65bdd9e40a4c96b6310e2da605df9c8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_w.bin
deleted file mode 100644
index bb59b47987bd796b22faa02b6bf9508c166ef2e0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_b.bin
deleted file mode 100644
index 3c5508709a01f293aed21a1ae94fdcf171e706f5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_w.bin
deleted file mode 100644
index 1870b8f4982ff75d21dd978177aeedad82a74554..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_b.bin
deleted file mode 100644
index 6d43fb53fe93405422aa09da68b22e41eaec615b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_w.bin
deleted file mode 100644
index 89c50f51bb3f9e9cbd3e9082d24be1bd460cdb9b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_b.bin
deleted file mode 100644
index 5425f434ba456b735482110d641067a645ee0843..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_w.bin
deleted file mode 100644
index 5220270e4aae3adf3c7b6c24a1480f595b036fc8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_b.bin
deleted file mode 100644
index 22a3fb3a7a21e4966d7bdb701338f9ad241c4384..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-g¡b½âdÖ>Šé?7Å>*8{¾`V9=S–	¿ŽC½u´ë½¾_¾
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_w.bin
deleted file mode 100644
index 2dc81917a1267633a1c7ffa37b877126b1272b2c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/input.bin
deleted file mode 100644
index e933707338f2e2f6acd13b0cf319cfd5e050041a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels.bin
deleted file mode 100644
index 890186739b5a29237b41797dad86cc3853ea13b0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels32.bin
deleted file mode 100644
index 0f344f699707b2e15c4364f369235ca997930dfa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layer_composition.txt
deleted file mode 100644
index 793ca7fcaf00561403ac65ecc0ef5f9a1efb8c43..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layer_composition.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layers.txt
deleted file mode 100644
index c179b38f70acd66fedfabc434ce3a637b5c9bf7a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/layers.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-Conv2,10000,32,32,32,32,32,3,3
-Conv3,10000,32,16,16,64,32,3,3
-Conv4,10000,64,16,16,64,64,3,3
-Conv5,10000,64,8,8,128,64,3,3
-Conv6,10000,128,8,8,128,128,3,3
-FC1,10000,2048,2048,10
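layers.txt above appears to give the autotuner one row per layer. A plausible reading of the conv rows — an assumption inferred from the tensor shapes in the sibling sources, not documented in this diff — is: name, batch size, input channels, input height, input width, output channels, input channels again, kernel height, kernel width; the FC row carries batch size, input dimension twice, and output dimension. As an illustrative struct:

```cpp
// Hypothetical decoding of a conv row such as "Conv3,10000,32,16,16,64,32,3,3".
struct ConvSpec {
  int batch;    // 10000
  int in_ch;    // 32 (appears twice in the row)
  int height;   // 16
  int width;    // 16
  int out_ch;   // 64
  int kernel_h; // 3
  int kernel_w; // 3
};
```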
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/promise_src.cc
deleted file mode 100644
index b40e93d759c095c397c60ea4e4ee5bbf1ce512a5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/promise_src.cc
+++ /dev/null
@@ -1,98 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int b = 0; b < batch_count; b++){ // 'b', not 'i', to avoid shadowing the outer runs loop
-
-
-
-std::string dir_prefix = std::string("alexnet2_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.8816435, 2.0934134, conv2d_1_w, -0.5421946, 0.3710851, conv2d_1_b, -0.06697306, 0.040868897, 1, 1, 1, 1, -1, 0, 0, -0.9998477, 0.99987465, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, -0.9998477, 0.99987465, conv2d_2_w, -0.42474225, 0.31460348, conv2d_2_b, -0.3557253, -0.17281663, 1, 1, 1, 1, 0, 2, 0, -0.99997115, 1.0, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, -0.99997115, 1.0, conv2d_3_w, -0.44134507, 0.79587924, conv2d_3_b, -0.80424446, 0.75330096, 1, 1, 1, 1, -1, 0, 0, -0.9999999, 1.0, 9); 
-void* var_3 = ConvLayer_PROMISE(var_2, -0.9999999, 1.0, conv2d_4_w, -0.2883836, 0.31025785, conv2d_4_b, -0.6353164, 0.29015934, 1, 1, 1, 1, 0, 2, 0, -0.9999999, 0.99999934, 9); 
-void* var_4 = ConvLayer_PROMISE(var_3, -0.9999999, 0.99999934, conv2d_5_w, -0.2792431, 0.37689754, conv2d_5_b, -1.1379756, 1.2391574, 1, 1, 1, 1, -1, 0, 0, -1.0, 1.0, 9); 
-void* var_5 = ConvLayer_PROMISE(var_4, -1.0, 1.0, conv2d_6_w, -0.27078503, 0.27942517, conv2d_6_b, -0.503003, 0.12762362, 1, 1, 1, 1, 0, 2, 0, -0.9999941, 0.9999964, 9); 
-void* var_6 = FCLayer_PROMISE(var_5, -0.9999941, 0.9999964, dense_1_w, -0.24273404, 0.5845544, dense_1_b, -0.53745, 0.558251, -1, -140.6419, 16.402884, 9); 
-void* var_7 = tensorSoftmax(var_6); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_7); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
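
Every ConvLayer_PROMISE / FCLayer_PROMISE call above threads three (min, max) pairs, for the input, weight, and bias tensors, plus an expected output range; the trailing integer (9 in every call) is presumably an approximation knob, but nothing in this diff defines it. Below is a minimal sketch of one common use of such range pairs, linear quantization with clamping; it is an assumption for illustration only, since the PROMISE runtime itself is not part of this diff and may behave differently.

#include <algorithm>
#include <cstdint>
#include <iostream>

// Hypothetical illustration: map a float into an n-bit code over [lo, hi].
// This is NOT the PROMISE runtime's code; it only shows the usual role of
// a (min, max) pair like those passed to ConvLayer_PROMISE above.
static uint32_t quantize(float v, float lo, float hi, int bits) {
  float c = std::min(std::max(v, lo), hi);        // clamp into [lo, hi]
  uint32_t levels = (1u << bits) - 1;             // e.g. 255 for 8 bits
  return static_cast<uint32_t>((c - lo) / (hi - lo) * levels + 0.5f);
}

int main() {
  // Using the conv2d_1_w range from the first call above.
  std::cout << quantize(0.0f, -0.5421946f, 0.3710851f, 8) << "\n";  // 151
  return 0;
}
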
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/src.cc
deleted file mode 100644
index 097db4657f2cd5b8002f5965a12b178b55bfeb84..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet2_cifar10_test/src.cc
+++ /dev/null
@@ -1,106 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("alexnet2_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,128,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorTanh(var_1); 
-void* var_3 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_4 = tensorAdd(var_3, conv2d_2_b); 
-void* var_5 = tensorTanh(var_4); 
-void* var_6 = tensorPooling(var_5,0,2,2,0,0,2,2); 
-void* var_8 = tensorConvolution(var_6, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-void* var_10 = tensorTanh(var_9); 
-void* var_11 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_12 = tensorAdd(var_11, conv2d_4_b); 
-void* var_13 = tensorTanh(var_12); 
-void* var_14 = tensorPooling(var_13,0,2,2,0,0,2,2); 
-void* var_16 = tensorConvolution(var_14, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorTanh(var_17); 
-void* var_19 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_20 = tensorAdd(var_19, conv2d_6_b); 
-void* var_21 = tensorTanh(var_20); 
-void* var_22 = tensorPooling(var_21,0,2,2,0,0,2,2); 
-void* var_25 = tensorGemmGPU(var_22, dense_1_w); 
-void* var_26 = tensorAdd(var_25, dense_1_b); 
-void* var_27 = tensorSoftmax(var_26); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_27); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
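
The FP32 reference above repeats the same conv, bias-add, tanh (and sometimes pool) sequence six times. The sketch below groups one such block into a helper; it uses only tensor_runtime calls that appear verbatim in the deleted file (so it assumes the same includes), and forwards the trailing constant arguments unchanged rather than interpreting them. The grouping is an editorial suggestion, not code from the repository.

// Assumes the tensor_runtime.h / utils.h includes of the file above.
static void* convTanhBlock(void* in, void* w, void* b, int pad, bool pool) {
  void* c = tensorConvolution(in, w, pad, pad, 1, 1, 1, 0);  // pad, stride 1
  void* t = tensorTanh(tensorAdd(c, b));                     // bias + tanh
  return pool ? tensorPooling(t, 0, 2, 2, 0, 0, 2, 2) : t;   // 2x2 stride-2
}
// Usage mirroring the body above:
//   void* x = convTanhBlock(input, conv2d_1_w, conv2d_1_b, 1, false);
//   x = convTanhBlock(x, conv2d_2_w, conv2d_2_b, 1, true);   // = var_6
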
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1.bin
deleted file mode 100644
index 90949bf5ce634285cc49260d2b9ec453995fb66c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1_bias.bin
deleted file mode 100644
index b9f8b7a07ccf8d89c2719db1481d2c11850b92fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2.bin
deleted file mode 100644
index 5e95b524c67265ba5505d7b5f6efde69b7ffb0a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2_bias.bin
deleted file mode 100644
index a4acf554bd24678e586dfa3219b6613ab39f4273..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3.bin
deleted file mode 100644
index 6753ea5f4569ab9b04c9fc2cd5b1a80066ea5388..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3_bias.bin
deleted file mode 100644
index 302c71630f5ef500e1ada5953c06a3ff2ca9225b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4.bin
deleted file mode 100644
index 8c350aa865218ceb1b97d15ed01e097561918f1e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4_bias.bin
deleted file mode 100644
index 23110bc5bb43c60e4241913688e295fc5f85f8f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5.bin
deleted file mode 100644
index c64242698c22ac21549559f4f8d8c6c8bdd13d2b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5_bias.bin
deleted file mode 100644
index a618430efa6f0d894332a775e973d3893dcee727..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/conv5_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1.bin
deleted file mode 100644
index 2fb2657fc96895b4d95b697c20a9df72b3c99787..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1_bias.bin
deleted file mode 100644
index caff7117a5e002b2a4877b8fe70b4b01328f6e26..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/fc1_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 values; rendered as mojibake in the original diff, elided here]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/norm_cifar_input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/norm_cifar_input.bin
deleted file mode 100644
index da6603283ee9157c0184b132bc7959d035daa0cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/norm_cifar_input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/test_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/test_labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/backup/test_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1.bin
deleted file mode 100644
index a7dfe76aeb185fdd9a511d139b5a5fcff1da03bf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1_bias.bin
deleted file mode 100644
index 7b633f3e1c9ba5063f43797a178046586b4b291c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv1_bias.bin
+++ /dev/null
@@ -1,5 +0,0 @@
-[raw float32 values; rendered as mojibake in the original diff, elided here]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2.bin
deleted file mode 100644
index 7bfbc9f375b1b2574053e6b499116a0419c3f804..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2_bias.bin
deleted file mode 100644
index 3a8c9423830785c2ee2956754e7285f7a6af8905..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3.bin
deleted file mode 100644
index 8e5694e2d7c12e6f429d440d8d54ff0be35226fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3_bias.bin
deleted file mode 100644
index 0ba06b9a7a337b5ce458dc530c0eea68e1b2e034..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4.bin
deleted file mode 100644
index 26d7d9d35661729d41fb7af159d0e53cad8edbe5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4_bias.bin
deleted file mode 100644
index 46575667290585cf153a7276125b4cd0dba2a162..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5.bin
deleted file mode 100644
index 4066edab066ce9f4172e7fa69e02446bd6e1d77c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5_bias.bin
deleted file mode 100644
index 6d84414b530c70417ce7425d77f8b46facfdba69..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/conv5_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1.bin
deleted file mode 100644
index c3754c155c1f5009156a13f2c8f30acdab5db0f0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1_bias.bin
deleted file mode 100644
index 3f841c276156bfc688514aeb58bb3544765ebfe2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/fc1_bias.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 values; rendered as mojibake in the original diff, elided here]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/norm_cifar_input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/norm_cifar_input.bin
deleted file mode 100644
index da6603283ee9157c0184b132bc7959d035daa0cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/norm_cifar_input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/test_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/test_labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10/test_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv0.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv0.bin
deleted file mode 100644
index c72d8766fc333b3ac92f0428079b5a0c16960225..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv0.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv3.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv3.bin
deleted file mode 100644
index 11aeb21688b5a167c9071f6b85587162fb8530d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv6.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv6.bin
deleted file mode 100644
index 461ee65ee372792be0c1b1e0bccc3c266398142e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv6.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv7.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv7.bin
deleted file mode 100644
index 26fdc870d534980277277b61d4c99cc808f409d7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv7.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv8.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv8.bin
deleted file mode 100644
index a0452cf34d2e7e69f568635f94bb0ebf3bf7e050..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv8.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias0.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias0.bin
deleted file mode 100644
index efaba5734071623f029f7b64ca555fc157021b12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias0.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias3.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias3.bin
deleted file mode 100644
index 34685ce65b38a189b9a075e8fdac0eeda7908d58..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias6.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias6.bin
deleted file mode 100644
index 7132f0ae8528aa80b1647dc37bee4beecbda0968..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias6.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias7.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias7.bin
deleted file mode 100644
index 969733004c449fd612923bff2de59069952bc622..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias7.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias8.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias8.bin
deleted file mode 100644
index fa3e53cddddb91704a33d0f09550ef9c4b40aea0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/conv_bias8.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc12.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc12.bin
deleted file mode 100644
index b6b31a7990f166f9e50b273b40e1750244966217..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc12.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc_bias12.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc_bias12.bin
deleted file mode 100644
index d4981bfaed00dc48e9ef5fe7d77dda9163e64ce1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/fc_bias12.bin
+++ /dev/null
@@ -1,2 +0,0 @@
-ÿº="į<vÙ=ðJ=/
-P½xcµ»"<“½äKH½@kV¼ÃÊ/½
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/input.bin
deleted file mode 100644
index da6603283ee9157c0184b132bc7959d035daa0cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/src.cc
deleted file mode 100644
index 5bf0f36e985ab8603847fa077c5b0d7571b693c8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_front/src.cc
+++ /dev/null
@@ -1,76 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("alexnet_cifar10_front/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv0.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv_bias0.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv3.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv_bias3.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv6.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv_bias6.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv7.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv_bias7.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv8.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv_bias8.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("fc12.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("fc_bias12.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorTanh(var_1); 
-void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-void* var_5 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-void* var_6 = tensorAdd(var_5, conv2d_2_b); 
-void* var_7 = tensorTanh(var_6); 
-void* var_8 = tensorPooling(var_7,0,2,2,0,0,2,2); 
-void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-void* var_12 = tensorTanh(var_11); 
-void* var_13 = tensorConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_14 = tensorAdd(var_13, conv2d_4_b); 
-void* var_15 = tensorTanh(var_14); 
-void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorTanh(var_17); 
-void* var_19 = tensorPooling(var_18,0,2,2,0,0,2,2); 
-void* var_22 = tensorGemmGPU(var_19, dense_1_w); 
-void* var_23 = tensorAdd(var_22, dense_1_b); 
-void* var_24 = tensorSoftmax(var_23); 
-
-computeAccuracy2(labels,10000,var_24); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
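
A quick shape check of the network above, using out = (H + 2*pad - k)/stride + 1 for the stride-1 convolutions, confirms that the dimensions in the readTrainedWeights() calls are mutually consistent:

// conv1: (32 + 2*5 - 11)/1 + 1 = 32, so the 11x11 kernel with pad 5
//        preserves the 32x32 input.
// conv2: (16 + 2*2 - 5)/1 + 1 = 16, so the 5x5 kernel with pad 2
//        preserves the post-pool 16x16.
// Three 2x2 stride-2 pools halve the spatial size: 32 -> 16 -> 8 -> 4.
static_assert((32 + 2*5 - 11) / 1 + 1 == 32, "conv1 keeps 32x32");
static_assert((16 + 2*2 - 5) / 1 + 1 == 16, "conv2 keeps 16x16");
// Flattened feature count entering the dense layer:
static_assert(256 * 4 * 4 == 4096, "matches dense_1_w (4096 x 10)");
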
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/approxhpvm_src.cc
deleted file mode 100644
index 6255f6af32362abd1b37fa4409046410eefcd030..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/approxhpvm_src.cc
+++ /dev/null
@@ -1,443 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 5, 5, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 2, 2, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_tanh(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* conv2d_1_b, size_t conv2d_1_b_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* conv2d_2_b, size_t conv2d_2_b_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* conv2d_3_b, size_t conv2d_3_b_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* conv2d_4_b, size_t conv2d_4_b_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* conv2d_5_b, size_t conv2d_5_b_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(13, input, conv2d_1_w, conv2d_1_b, conv2d_2_w, conv2d_2_b, conv2d_3_w, conv2d_3_b, conv2d_4_w, conv2d_4_b, conv2d_5_w, conv2d_5_b, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 6, 2, 0); 
-  __visc__bindIn(var_4, 7, 3, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-  __visc__bindIn(var_5, 8, 2, 0); 
-  __visc__bindIn(var_5, 9, 3, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-  __visc__bindIn(var_8, 10, 2, 0); 
-  __visc__bindIn(var_8, 11, 3, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 12, 2, 0); 
-  __visc__bindIn(var_9, 13, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-  __visc__bindIn(var_11, 14, 2, 0); 
-  __visc__bindIn(var_11, 15, 3, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 16, 2, 0); 
-  __visc__bindIn(var_12, 17, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-  __visc__bindIn(var_14, 18, 2, 0); 
-  __visc__bindIn(var_14, 19, 3, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 20, 2, 0); 
-  __visc__bindIn(var_15, 21, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 22, 2, 0); 
-  __visc__bindIn(var_18, 23, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-  __visc__bindIn(var_19, 24, 2, 0); 
-  __visc__bindIn(var_19, 25, 3, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_20, 0, 0, 0); 
-  __visc__bindOut(var_20, 1, 1, 0); 
-
-}
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* conv2d_1_b; 
-  size_t conv2d_1_b_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* conv2d_2_b; 
-  size_t conv2d_2_b_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* conv2d_3_b; 
-  size_t conv2d_3_b_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* conv2d_4_b; 
-  size_t conv2d_4_b_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* conv2d_5_b; 
-  size_t conv2d_5_b_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-}
-RootIn;
-
-int main(){ 
-
-std::string dir_prefix = std::string("alexnet_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->conv2d_1_b = conv2d_1_b; 
-args->conv2d_1_b_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->conv2d_2_b = conv2d_2_b; 
-args->conv2d_2_b_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->conv2d_3_b = conv2d_3_b; 
-args->conv2d_3_b_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->conv2d_4_b = conv2d_4_b; 
-args->conv2d_4_b_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->conv2d_5_b = conv2d_5_b; 
-args->conv2d_5_b_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
- computeAccuracy2(labels, 10000, result); 
-return 0; 
-
-} 
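
For readers unfamiliar with the VISC/HPVM intrinsics used above, the annotation below summarizes the wiring pattern as this file uses it. The argument readings are inferred from the file's own usage, not from documentation included in this diff.

// Wiring vocabulary, as inferred from the calls above:
//   __visc__createNodeND(0, f)       a single-instance (0-D) node whose
//                                    body is the leaf function f
//   __visc__bindIn(n, p, c, 0)       root() argument #p feeds input #c of n
//   __visc__edge(a, b, 1, o, i, 0)   a one-to-one edge: output #o of node a
//                                    into input #i of node b
//   __visc__bindOut(n, o, p, 0)      output #o of n becomes root output #p
// Every tensor travels as a (pointer, byte-count) pair, which is why each
// bind and edge appears twice (once for the pointer, once for the size),
// and why the bindIn indices advance by two per weight/bias argument
// (6,7 then 8,9 then 10,11 ...). The trailing 0 argument is identical in
// every call and is left uninterpreted here.
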
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_b.bin
deleted file mode 100644
index efaba5734071623f029f7b64ca555fc157021b12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_w.bin
deleted file mode 100644
index c72d8766fc333b3ac92f0428079b5a0c16960225..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_b.bin
deleted file mode 100644
index 34685ce65b38a189b9a075e8fdac0eeda7908d58..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_w.bin
deleted file mode 100644
index 11aeb21688b5a167c9071f6b85587162fb8530d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_b.bin
deleted file mode 100644
index 7132f0ae8528aa80b1647dc37bee4beecbda0968..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_w.bin
deleted file mode 100644
index 461ee65ee372792be0c1b1e0bccc3c266398142e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_b.bin
deleted file mode 100644
index 969733004c449fd612923bff2de59069952bc622..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_w.bin
deleted file mode 100644
index 26fdc870d534980277277b61d4c99cc808f409d7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_b.bin
deleted file mode 100644
index fa3e53cddddb91704a33d0f09550ef9c4b40aea0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_w.bin
deleted file mode 100644
index a0452cf34d2e7e69f568635f94bb0ebf3bf7e050..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_b.bin
deleted file mode 100644
index d4981bfaed00dc48e9ef5fe7d77dda9163e64ce1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_b.bin
+++ /dev/null
@@ -1,2 +0,0 @@
-ÿº="į<vÙ=ðJ=/
-P½xcµ»"<“½äKH½@kV¼ÃÊ/½
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_w.bin
deleted file mode 100644
index b6b31a7990f166f9e50b273b40e1750244966217..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/input.bin
deleted file mode 100644
index 17e72b23c956926964ad3c8a8becc69267cc854d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels.bin
deleted file mode 100644
index 6a9c75fdbe3754a721541269cc6b03d6a0ff3485..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels32.bin
deleted file mode 100644
index 4c875ae0ae30adbbe79df60ae5f8dc70e21a5ed8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layer_composition.txt
deleted file mode 100644
index f4a3785d2c039705b15d159fa2beb972de4fcb3f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layer_composition.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-conv  add  activation  pool  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  
-conv  add  activation  pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layers.txt
deleted file mode 100644
index 640c28e7cb31bd8032860a5ef63e74495335d94c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/layers.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Conv1,10000,3,32,32,64,3,11,11
-Conv2,10000,64,16,16,192,64,5,5
-Conv3,10000,192,8,8,384,192,3,3
-Conv4,10000,384,8,8,256,384,3,3
-Conv5,10000,256,8,8,256,256,3,3
-FC1,10000,4096,4096,10
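
layer_composition.txt and layers.txt describe the same network that src.cc builds, one line per layer: the operator sequence in the former, the tensor dimensions in the latter. Matching rows against the readTrainedWeights() shapes in src.cc suggests the column layout: for Conv rows, batch count N, then input C/H/W, then the weight tensor's C_out, C_in, kH, kW (compare Conv1,10000,3,32,32,64,3,11,11 with the (10000,3,32,32) input and the (64,3,11,11) conv2d_1_w); for FC rows, N, the flattened input size, then the weight's rows and columns. That mapping is an inference, not documented in the diff. A minimal parsing sketch under that inferred layout:

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Parse layers.txt rows into integer fields, skipping the layer name.
// The column meanings are the inferred ones described above.
std::vector<std::vector<int>> parseLayers(const std::string& path) {
  std::vector<std::vector<int>> rows;
  std::ifstream in(path);
  std::string line;
  while (std::getline(in, line)) {
    std::stringstream ss(line);
    std::string field;
    std::getline(ss, field, ',');            // layer name, e.g. "Conv1"
    std::vector<int> dims;
    while (std::getline(ss, field, ','))
      dims.push_back(std::stoi(field));
    rows.push_back(dims);
  }
  return rows;
}

int main() {
  for (const auto& r : parseLayers("layers.txt"))  // assumes a local copy
    std::cout << r.size() << " fields\n";
  return 0;
}
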
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/promise_src.cc
deleted file mode 100644
index 60b7e1e7661078c0e7791421e0d64aacff60c178..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/promise_src.cc
+++ /dev/null
@@ -1,93 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
-std::string dir_prefix = std::string("alexnet_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.8816426241908337, 2.0934095498544254, conv2d_1_w, -0.33087718, 0.3323643, conv2d_1_b, -0.7782218, 0.6020472, 5, 5, 1, 1, 0, 2, 0, -1.0, 1.0, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, -1.0, 1.0, conv2d_2_w, -0.2095158, 0.33543423, conv2d_2_b, -0.45020863, 0.30596754, 2, 2, 1, 1, 0, 2, 0, -1.0, 1.0, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, -1.0, 1.0, conv2d_3_w, -0.1715614, 0.17037082, conv2d_3_b, -0.6519161, 0.5939945, 1, 1, 1, 1, -1, 0, 0, -1.0, 1.0, 9); 
-void* var_3 = ConvLayer_PROMISE(var_2, -1.0, 1.0, conv2d_4_w, -0.15575546, 0.14456555, conv2d_4_b, -0.55873865, 0.4704539, 1, 1, 1, 1, -1, 0, 0, -1.0, 1.0, 9); 
-void* var_4 = ConvLayer_PROMISE(var_3, -1.0, 1.0, conv2d_5_w, -0.16108225, 0.16864482, conv2d_5_b, -0.22135437, 0.10401678, 1, 1, 1, 1, 0, 2, 0, -1.0, 1.0, 9); 
-void* var_5 = FCLayer_PROMISE(var_4, -1.0, 1.0, dense_1_w, -0.18183032, 0.19018902, dense_1_b, -0.07189204, 0.106005594, -1, -30.96493, 44.04829, 9); 
-void* var_6 = tensorSoftmax(var_5); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_6); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
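
As in the alexnet2 variant, this PROMISE build wraps the whole evaluation in total_runs = 100 iterations, dumping one accuracy per run and then the full series via dumpExecutionAccuracies(), the natural setup when the backend is approximate and accuracy varies from run to run. A post-processing sketch for such a series follows; the file name and one-value-per-line layout are assumptions for illustration only.

#include <cmath>
#include <fstream>
#include <iostream>
#include <vector>

// Summarize per-run accuracies with mean and standard deviation.
int main() {
  std::ifstream in("run_accuracies.txt");   // hypothetical dump location
  std::vector<double> acc;
  double v;
  while (in >> v) acc.push_back(v);
  if (acc.empty()) return 1;
  double mean = 0, var = 0;
  for (double a : acc) mean += a;
  mean /= acc.size();
  for (double a : acc) var += (a - mean) * (a - mean);
  var /= acc.size();
  std::cout << "mean=" << mean << " stddev=" << std::sqrt(var) << "\n";
  return 0;
}
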
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/src.cc
deleted file mode 100644
index d972cc0c30bfbf0fc185ac70499a7578e3946572..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/alexnet_cifar10_test/src.cc
+++ /dev/null
@@ -1,99 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("alexnet_cifar10_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,11,11); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,192,64,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,384,192,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,256,384,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,4096,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 5, 5, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorTanh(var_1); 
-void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-void* var_5 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-void* var_6 = tensorAdd(var_5, conv2d_2_b); 
-void* var_7 = tensorTanh(var_6); 
-void* var_8 = tensorPooling(var_7,0,2,2,0,0,2,2); 
-void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-void* var_12 = tensorTanh(var_11); 
-void* var_13 = tensorConvolution(var_12, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_14 = tensorAdd(var_13, conv2d_4_b); 
-void* var_15 = tensorTanh(var_14); 
-void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorTanh(var_17); 
-void* var_19 = tensorPooling(var_18,0,2,2,0,0,2,2); 
-void* var_22 = tensorGemmGPU(var_19, dense_1_w); 
-void* var_23 = tensorAdd(var_22, dense_1_b); 
-void* var_24 = tensorSoftmax(var_23); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_24); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
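Both deleted AlexNet harnesses follow the same generated skeleton: initialize the runtime, load the trained weights, split the test set into batches, run the operator chain per batch, and report the mean of the per-batch accuracies. Note that this particular file sets batch_size equal to test_input_size (10000), so batch_count is 1, the loop body executes once, and the final division is a no-op; the batching only pays off when the generator emits a smaller batch_size to bound GPU memory. A minimal, self-contained sketch of just that batching arithmetic, where run_batch is a hypothetical stand-in for the readInputBatch / operator chain / readLabelsBatch / computeAccuracy2 / freeBatchMemory sequence above:

    #include <cstdio>

    int main() {
      const int test_input_size = 10000;
      const int batch_size = 2500;  // hypothetical; the deleted file uses 10000
      const int batch_count = test_input_size / batch_size;

      // Stand-in for: readInputBatch + tensor ops + readLabelsBatch +
      // computeAccuracy2 + freeBatchMemory.
      auto run_batch = [](int start, int end) -> float {
        std::printf("would evaluate images [%d, %d)\n", start, end);
        return 0.0f;                // a real harness returns the batch accuracy
      };

      float final_accuracy = 0.0f;
      for (int i = 0; i < batch_count; i++)
        final_accuracy += run_batch(i * batch_size, (i + 1) * batch_size);

      std::printf("final accuracy: %.4f\n", final_accuracy / batch_count);
      return 0;
    }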
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1.bin
deleted file mode 100644
index 6f71aade324ae87f604c62c4eb15769cfb0d5844..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1_bias.bin
deleted file mode 100644
index a76d1cb8c50dc00f51379359e3bbfcf7709657d9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2.bin
deleted file mode 100644
index 486dcacc3ecb15e2eb62b4b8354e79eebfbfd7dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2_bias.bin
deleted file mode 100644
index 7b2e6c4ead7fca849c88b6fd6ad7c1d1c2d0f606..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3.bin
deleted file mode 100644
index 41716fd7cec3db014992b630750413ce16e6f7d6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3_bias.bin
deleted file mode 100644
index 8ef1cc09bc990836f0d3f35e855e08746203ae52..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4.bin
deleted file mode 100644
index 1d081ffda67c56492e2e5588fcf4ca99252680b2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4_bias.bin
deleted file mode 100644
index 45da3d8a5c9298b575178838a51da4bdb9d18515..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/conv4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1.bin
deleted file mode 100644
index 4fb0f40947346a682c8d17ed797cfc7d309fd1c5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1_bias.bin
deleted file mode 100644
index 07439b29d8120548feda150a69e7c749308e5578..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2.bin
deleted file mode 100644
index a186b2adedf3518e70db72ba8a0282733c0959a1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2_bias.bin
deleted file mode 100644
index 10e3aa2b3e50ec1f2211a6214a48e0ddc8345997..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/input.bin
deleted file mode 100644
index 1f0320e2c012608fd9869f721f023c2667b6d2e0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/labels.bin
deleted file mode 100644
index daac5750a5ef9ceaf7c708b4c0c34eb009684404..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/test_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/test_labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/cifar_keras/test_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1.bin
deleted file mode 100644
index bc4e57ac0a859851103667d6eb8cc835b70e04de..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1_bias.bin
deleted file mode 100644
index 3acb43f03590d809b55df5a4cc264a1d4f8318ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2.bin
deleted file mode 100644
index ccac40a8f33803d941fa0041c8568ea589fdd945..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2_bias.bin
deleted file mode 100644
index 3aff63b21593ec7ebd0c04f41f151bad113cc2e4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc2_clipped/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1.bin
deleted file mode 100644
index be2731c02774a6ed6c554121cb507ca753b87144..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1_bias.bin
deleted file mode 100644
index ea4933ab67c82010bde969df1028adb828c0a44d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2.bin
deleted file mode 100644
index d69299cae0826fc57a32a3bc389bcf25603d9bc6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2_bias.bin
deleted file mode 100644
index 1001ecf3ac16b388f4619d12bfeab4ca7db3e726..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3.bin
deleted file mode 100644
index dd97ce049b50c71430f4db63219931e168e05515..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3_bias.bin
deleted file mode 100644
index 460b7fe40bd60902338b99aed2446ed746c1c8ea..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc3_clipped/fc3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1.bin
deleted file mode 100644
index 56ec40072906c21991c9bee2985651f6fadeaba0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1_bias.bin
deleted file mode 100644
index 421dd8c3782c2acfe019530565c2855cdccd3bf0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2.bin
deleted file mode 100644
index cf18f1c5d631ce9734cfc067b503c7d9484fd1ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2_bias.bin
deleted file mode 100644
index 7fce451559ed38bfa5d642f6df6d13e270a49c4c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3.bin
deleted file mode 100644
index 57938a2e2db17982f9646ea57cb2551c41f1ce8f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3_bias.bin
deleted file mode 100644
index f5ae2b4abd77063bd80cda4a9321c62d5a42070b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc3_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4.bin
deleted file mode 100644
index 7ed4b5a50917b127351dc7a673a8c87ac8ddedd6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4_bias.bin
deleted file mode 100644
index 0633a1a2bf444ecbe6e0b5896c92b72a5fc7ecdb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/fc4_clipped/fc4_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1.bin
deleted file mode 100644
index 670261107ead208daa54e1471a1818b098315ee7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1_bias.bin
deleted file mode 100644
index e4469ef562dae74e197fa44fb8ac762ac5c4a288..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2.bin
deleted file mode 100644
index 3e03b76e819f700dc0e5a23919e1ec277ec4774b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2_bias.bin
deleted file mode 100644
index 3a8053c9575fcff5134dbb921accaf37fd294e46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1.bin
deleted file mode 100644
index 6e27b6e9147a65e1db3477bfda0a0a1274783215..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1_bias.bin
deleted file mode 100644
index a28db7e3aeb6e2892d31122603a83667995da874..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2.bin
deleted file mode 100644
index 01774f984fdf39eaeba5023caa01fe203046667f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2_bias.bin
deleted file mode 100644
index d59c5dee69dffdc9ca49a0707244595074e1a471..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_clipped/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1.bin
deleted file mode 100644
index 202d11adb809f2035db1d4d09806661b8bd978cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1_bias.bin
deleted file mode 100644
index 11ac18222cac00e38c809f5f132824e1000ccb50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2.bin
deleted file mode 100644
index bea1dfe0079e8fb80730620cad4859f5e2baaa9a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2_bias.bin
deleted file mode 100644
index f537eb0cd2ec3847bcb90fab8bb5025157097b1e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1.bin
deleted file mode 100644
index c43543bb447c31e60545358929f4df460a1b0d9f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1_bias.bin
deleted file mode 100644
index 01bd440f4f16036b2ceafbbb5629ee02082ed82d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2.bin
deleted file mode 100644
index 7b51cb239a1423432059bd84feee57f70068d1fe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2_bias.bin
deleted file mode 100644
index 76535beffd242bfe579ea55cf82e80c60d871c96..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_params/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1.bin
deleted file mode 100644
index 1f67654d3ad5fd01f92efab6b7977ba43bdd523d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1_bias.bin
deleted file mode 100644
index 9ef4abd162cf515ef3df6f9f10e0d281165d39ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2.bin
deleted file mode 100644
index fc46877a1a81154f8c78f559c457a0f691289a48..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2_bias.bin
deleted file mode 100644
index 42ee244812fd9c46f7208fd8396bf4dbf5df6197..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1.bin
deleted file mode 100644
index c9dd2a8c001e075b52feffcd2538107a08c74932..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1_bias.bin
deleted file mode 100644
index 77d4c31025156e35069533ef65623a6b019962df..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2.bin
deleted file mode 100644
index 92c9f125a58d324947f20b3d41754fd491c2ac63..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2_bias.bin
deleted file mode 100644
index d4cb460055bfd675b1c576c7224d00b362d1aa7f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1.bin
deleted file mode 100644
index 1f67654d3ad5fd01f92efab6b7977ba43bdd523d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1_bias.bin
deleted file mode 100644
index 9ef4abd162cf515ef3df6f9f10e0d281165d39ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2.bin
deleted file mode 100644
index fc46877a1a81154f8c78f559c457a0f691289a48..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2_bias.bin
deleted file mode 100644
index 42ee244812fd9c46f7208fd8396bf4dbf5df6197..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1.bin
deleted file mode 100644
index c9dd2a8c001e075b52feffcd2538107a08c74932..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1_bias.bin
deleted file mode 100644
index 77d4c31025156e35069533ef65623a6b019962df..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2.bin
deleted file mode 100644
index 92c9f125a58d324947f20b3d41754fd491c2ac63..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2_bias.bin
deleted file mode 100644
index d4cb460055bfd675b1c576c7224d00b362d1aa7f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet2_tanh2/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1.bin
deleted file mode 100644
index 321259debd6ff4d1445edcadd69cc6de53f3f03d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1_bias.bin
deleted file mode 100644
index 484b86517f5496d0f97f7d5af2c0ad04b869be6e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2.bin
deleted file mode 100644
index 612cedbfad68ad210388bb7c1a9825e97e673872..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2_bias.bin
deleted file mode 100644
index 2994d417de60c4a04c671978c6cbff4c9457d9ef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1.bin
deleted file mode 100644
index 78f283aa48f4fb58fe3bc07a5320836107269596..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1_bias.bin
deleted file mode 100644
index cbd06906d6c5a3bf16e45c01d8ccbc2338b20bc8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2.bin
deleted file mode 100644
index 637b3538402d599668af1a90e405345cab4c45b7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2_bias.bin
deleted file mode 100644
index fab36f6c21745e478934d98b64ca1220d0c9fc0e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet3_params/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1.bin
deleted file mode 100644
index 89ab6ad37cac94360f7f87c93676f353829f1deb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1_bias.bin
deleted file mode 100644
index 0a2a381337e13fe52959c838b4a2bedab3c3f8ab..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2.bin
deleted file mode 100644
index 6cd00b88c5be6e212f2d3a37c8ea2a8edb1ceca7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2_bias.bin
deleted file mode 100644
index c0adf3e885ce855a0cc9d1b4b12f73665187159e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1.bin
deleted file mode 100644
index 152c5bb0baae480f6b8d317889fc68f8d77247b6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1_bias.bin
deleted file mode 100644
index 58221f45cdc56049b2edc29c244ea9d797a87fb5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2.bin
deleted file mode 100644
index 97d78a9610b15be285661c1d762026c9fa4100cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2_bias.bin
deleted file mode 100644
index cbda59beef150dfbca756621286f042ec8e247bf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/input.bin
deleted file mode 100644
index 4d2423f74188cfe0364185ccb66837785ccf4c4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels.bin
deleted file mode 100644
index 5e1f3881897f4729d6d90ff208a08ccdabb8fe7c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels32.bin
deleted file mode 100644
index 6f1d7576cd18621a2cf646d0dd835846623589e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_keras/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bias.bin
deleted file mode 100644
index 7536ef9f25e8fe7c7d47dac2857fe1cb291464d6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bin
deleted file mode 100644
index 48dabc33ff1ffc605aba73b34f884c2e43f23910..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bias.bin
deleted file mode 100644
index 103ae6938d19b43b462c352d4c4d23c0bef7caaf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bin
deleted file mode 100644
index ff4242bb8002c7e81e5655bfa197541da6a9921f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-images-idx3-ubyte b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-images-idx3-ubyte
deleted file mode 100644
index 1170b2cae98de7a524b163fcc379ac8f00925b12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-images-idx3-ubyte and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-labels-idx1-ubyte b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-labels-idx1-ubyte
deleted file mode 100644
index 5e1f3881897f4729d6d90ff208a08ccdabb8fe7c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/datasets/t10k-labels-idx1-ubyte and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/input.bin
deleted file mode 100644
index 4d2423f74188cfe0364185ccb66837785ccf4c4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bias.bin
deleted file mode 100644
index a9f5de5084ae4506e610ebe7deba62de40f3e536..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bin
deleted file mode 100644
index 232032080ffe11e84977e84ebfde02c728ba2718..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bias.bin
deleted file mode 100644
index 67e323754adf73f147a1776916d6f48b7fdd7782..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bin
deleted file mode 100644
index ddde5fb3258d7abea7ece3fc0455e7532e4a30ee..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/ip2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/labels.bin
deleted file mode 100644
index 5e1f3881897f4729d6d90ff208a08ccdabb8fe7c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/mnist_float_input.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/mnist_float_input.bin
deleted file mode 100644
index 779dcf7f6ad72f3e22d5c96148d2f0f7e11e39b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_params/mnist_float_input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_b.bin
deleted file mode 100644
index a6ba5ca7a322a2ec5e0cecc747b1e79437da4aa5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_w.bin
deleted file mode 100644
index 5debef814114d7417e0be4e81b74f4967bae773e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_b.bin
deleted file mode 100644
index b2b66205d286e9d4d293c9f66d073db3c093e77c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_w.bin
deleted file mode 100644
index 0657d2ceb83ed3810eec690c426658d10deafe75..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_b.bin
deleted file mode 100644
index 413180bf64912138d5818cb36f1a43e9dfc33292..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_w.bin
deleted file mode 100644
index 076b7f1b3acd2e7e99cff2e4dd65247d28d5f69c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_b.bin
deleted file mode 100644
index 33830cae0be25a47c3be0de7f79d8993b1059a82..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_w.bin
deleted file mode 100644
index fa27b1a0bd7c21dc56cb6c0e7220ecfc8f90ee42..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_b.bin
deleted file mode 100644
index 4b85c5f1a37519d5c754798f59bd22aa9ac36b45..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_w.bin
deleted file mode 100644
index b5ea02a6ded22099f1f93457393412a80345f826..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/input.bin
deleted file mode 100644
index 4d2423f74188cfe0364185ccb66837785ccf4c4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/labels.bin
deleted file mode 100644
index 5e1f3881897f4729d6d90ff208a08ccdabb8fe7c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/promise_src.cc
deleted file mode 100644
index 93e7652ce57b0a54c8300e1df380ff07c7052f51..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/promise_src.cc
+++ /dev/null
@@ -1,58 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-
-std::string dir_prefix = std::string("lenet_relu/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,1,28,28); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = ConvLayer_PROMISE(input, 0.0, 1.0, conv2d_1_w, -0.2722561, 0.25817025, conv2d_1_b, -0.041063767, 0.031912163, 2, 2, 1, 1, 0, 2, 1, 0.0, 1.5512946, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.5512946, conv2d_2_w, -0.17580177, 0.16332611, conv2d_2_b, -0.041385915, 0.05869476, 2, 2, 1, 1, -1, 0, 1, 0.0, 4.916329, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.916329, conv2d_3_w, -0.20324017, 0.18275258, conv2d_3_b, -0.039915435, 0.04589232, 1, 1, 2, 2, -1, 0, 1, 0.0, 9.447418, 9); 
-void* var_3 = FCLayer_PROMISE(var_2, 0.0, 9.447418, dense_1_w, -0.10757191, 0.123126, dense_1_b, -0.025070198, 0.027000334, 1, 0.0, 9.926857, 9); 
-void* var_4 = FCLayer_PROMISE(var_3, 0.0, 9.926857, dense_2_w, -0.18867673, 0.16425411, dense_2_b, -0.012622595, 0.04586973, 1, 0.0, 42.018578, 9); 
-void* var_5 = tensorSoftmax(var_4); 
-
-computeAccuracy2(labels,10000,var_5); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
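A pattern worth noting in the deleted promise_src.cc above: every ConvLayer_PROMISE and FCLayer_PROMISE call carries explicit (min, max) value ranges for its input, weights, bias, and output, and each layer's output range is passed verbatim as the next layer's input range (0.0/1.5512946 flows from var_0 into var_1, 4.916329 from var_1 into var_2, and so on), with a trailing knob of 9 on every call. An annotated restatement of the first call; the parameter names are inferred from the argument values, not taken from a documented signature:

    void* var_0 = ConvLayer_PROMISE(
        input, 0.0, 1.0,                        // input tensor and its (min, max)
        conv2d_1_w, -0.2722561, 0.25817025,     // weights and their (min, max)
        conv2d_1_b, -0.041063767, 0.031912163,  // bias and its (min, max)
        2, 2, 1, 1,                             // likely padding (h, w) and stride (h, w)
        0, 2,                                   // likely pooling selector and pool size
        1,                                      // likely activation selector (the tanh harnesses pass 0 here)
        0.0, 1.5512946,                         // output (min, max), reused as var_1's input range
        9);                                     // approximation level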
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/src.cc
deleted file mode 100644
index 5524c2edb5396ffd26e9907630b8514f9eb57e42..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/lenet_relu/src.cc
+++ /dev/null
@@ -1,68 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("lenet_relu/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,1,28,28); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,1,5,5); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,5,5); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,3136,1024); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,1024,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorPooling(var_2,0,2,2,0,0,2,2); 
-void* var_4 = tensorConvolution(var_3, conv2d_2_w, 2, 2, 1, 1, 1, 0); 
-void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-void* var_6 = tensorRelu(var_5); 
-void* var_8 = tensorConvolution(var_6, conv2d_3_w, 1, 1, 2, 2, 1, 0); 
-void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-void* var_10 = tensorRelu(var_9); 
-void* var_12 = tensorGemmGPU(var_10, dense_1_w); 
-void* var_13 = tensorAdd(var_12, dense_1_b); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorGemmGPU(var_14, dense_2_w); 
-void* var_16 = tensorAdd(var_15, dense_2_b); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = tensorSoftmax(var_17); 
-
-computeAccuracy2(labels,10000,var_18); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
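The deleted src.cc is the unfused baseline for the same lenet_relu network: where promise_src.cc emits one fused ConvLayer_PROMISE call per layer, this file spells each layer out operator by operator with identical weights and shapes, and it evaluates all 10000 images in a single pass instead of averaging over batches. The first layer of the two files lines up as follows (both calls are restated from the hunks above):

    // promise_src.cc: fused layer with value ranges and approximation knob
    void* var_0 = ConvLayer_PROMISE(input, 0.0, 1.0,
                                    conv2d_1_w, -0.2722561, 0.25817025,
                                    conv2d_1_b, -0.041063767, 0.031912163,
                                    2, 2, 1, 1, 0, 2, 1, 0.0, 1.5512946, 9);

    // src.cc: the same layer, unfused
    void* var_0 = tensorConvolution(input, conv2d_1_w, 2, 2, 1, 1, 1, 0); // pad 2x2, stride 1x1
    void* var_1 = tensorAdd(var_0, conv2d_1_b);                           // bias add
    void* var_2 = tensorRelu(var_1);                                      // activation
    void* var_3 = tensorPooling(var_2, 0, 2, 2, 0, 0, 2, 2);              // pool type 0, 2x2 window, stride 2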
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1.bin
deleted file mode 100644
index c841ed3b821617f81fc8764830868e64713668db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1_bias.bin
deleted file mode 100644
index c4566564e1ad5dc9a0231575ca33f752b53c24a7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2.bin
deleted file mode 100644
index 2304c792451e65d7a6f4615060dfc0c90164dc29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2_bias.bin
deleted file mode 100644
index 08c01b1586c269269d8dc8951afb7cd0c02606b2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1.bin
deleted file mode 100644
index 3f59c44723443b40667340a60ae20311133c425a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1_bias.bin
deleted file mode 100644
index 24656c9753f9ab1b6d8b648f2fe7f3d6af24bebd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2.bin
deleted file mode 100644
index 1b567de77acfc62b54ec4a676df8256b07a6b127..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2_bias.bin
deleted file mode 100644
index a7c0eae24cc844613f616fda43cd444c5f506ebf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1.bin
deleted file mode 100644
index c841ed3b821617f81fc8764830868e64713668db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1_bias.bin
deleted file mode 100644
index c4566564e1ad5dc9a0231575ca33f752b53c24a7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2.bin
deleted file mode 100644
index 2304c792451e65d7a6f4615060dfc0c90164dc29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2_bias.bin
deleted file mode 100644
index 08c01b1586c269269d8dc8951afb7cd0c02606b2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1.bin
deleted file mode 100644
index 3f59c44723443b40667340a60ae20311133c425a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1_bias.bin
deleted file mode 100644
index 24656c9753f9ab1b6d8b648f2fe7f3d6af24bebd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2.bin
deleted file mode 100644
index 1b567de77acfc62b54ec4a676df8256b07a6b127..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2_bias.bin
deleted file mode 100644
index a7c0eae24cc844613f616fda43cd444c5f506ebf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh/lenet_tanh/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1.bin
deleted file mode 100644
index 9c6404768d1262101afc967cd3b660a7e757cd25..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1_bias.bin
deleted file mode 100644
index 8335621803cf622c0724fd437623d9277efb458c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2.bin
deleted file mode 100644
index 09c1e72f56a144675d48e5d4969e260100c35ada..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2_bias.bin
deleted file mode 100644
index 3d4d6e388f22cf825a1e8b434fd34080fc8912e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1.bin
deleted file mode 100644
index 36e6a84bfba394921e4ca50c2acaba1482ea0ae1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1_bias.bin
deleted file mode 100644
index 56442e581a16e7f76a46866274c0ea66ea8be086..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2.bin
deleted file mode 100644
index 0ad7ae497969781128a6f98fd923655934fd217a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2_bias.bin
deleted file mode 100644
index 4cb6d824d540d66502b5e7ab0157e567a2d1a300..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_tanh2/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1.bin
deleted file mode 100644
index ca013bfaafd114694b2f83ecfc2d177fdb38990e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1_bias.bin
deleted file mode 100644
index bbb78f121d9c2a1ae219a45cc20539a990648186..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2.bin
deleted file mode 100644
index 621b5259648bb00ade00273d73ed30f2dc0af52c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2_bias.bin
deleted file mode 100644
index 61ffbbc2ae7d92dd220e09ca1418898bf63f973e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/conv2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1.bin
deleted file mode 100644
index c8a16853b5dcf00cfaac58438ee28c8c9273b077..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1_bias.bin
deleted file mode 100644
index 3346319c7e19432d16e3eb471b53216fa6efb162..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc1_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2.bin
deleted file mode 100644
index 19286ba29008b48b34409b963e92e8817d35e6e3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2_bias.bin b/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2_bias.bin
deleted file mode 100644
index 94a9ea8487c42b6b27d411ab678f64085b12fef4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/lenet_test_params/fc2_bias.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layer_composition.txt# b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layer_composition.txt#
deleted file mode 100644
index 10692997a90e4490a91ad3d0e6e04285754144fd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layer_composition.txt#
+++ /dev/null
@@ -1,83 +0,0 @@
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layers.txt# b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layers.txt#
deleted file mode 100644
index 0bd2b554374c10d748a652f52e5427c716be0084..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/#layers.txt#
+++ /dev/null
@@ -1,83 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorBatchNorm1
-#tensorRelu1
-#tensorDepthwiseConv1
-#tensorBatchNorm2
-#tensorRelu2
-Conv2,10000,32,32,32,64,32,1,1
-#tensorBatchNorm3
-#tensorRelu3
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu4
-Conv3,10000,64,16,16,128,64,1,1
-#tensorBatchNorm5
-#tensorRelu5
-#tensorDepthwiseConv3
-#tensorBatchNorm6
-#tensorRelu6
-Conv4,10000,128,16,16,128,128,1,1
-#tensorBatchNorm7
-#tensorRelu7
-#tensorDepthwiseConv4
-#tensorBatchNorm8
-#tensorRelu8
-Conv5,10000,128,8,8,256,128,1,1
-#tensorBatchNorm9
-#tensorRelu9
-#tensorDepthwiseConv5
-#tensorBatchNorm10
-#tensorRelu10
-Conv6,10000,256,8,8,256,256,1,1
-#tensorBatchNorm11
-#tensorRelu11
-#tensorDepthwiseConv6
-#tensorBatchNorm12
-#tensorRelu12
-Conv7,10000,256,4,4,512,256,1,1
-#tensorBatchNorm13
-#tensorRelu13
-#tensorDepthwiseConv7
-#tensorBatchNorm14
-#tensorRelu14
-Conv8,10000,512,4,4,512,512,1,1
-#tensorBatchNorm15
-#tensorRelu15
-#tensorDepthwiseConv8
-#tensorBatchNorm16
-#tensorRelu16
-Conv9,10000,512,4,4,512,512,1,1
-#tensorBatchNorm17
-#tensorRelu17
-#tensorDepthwiseConv9
-#tensorBatchNorm18
-#tensorRelu18
-Conv10,10000,512,4,4,512,512,1,1
-#tensorBatchNorm19
-#tensorRelu19
-#tensorDepthwiseConv10
-#tensorBatchNorm20
-#tensorRelu20
-Conv11,10000,512,4,4,512,512,1,1
-#tensorBatchNorm21
-#tensorRelu21
-#tensorDepthwiseConv11
-#tensorBatchNorm22
-#tensorRelu22
-Conv12,10000,512,4,4,512,512,1,1
-#tensorBatchNorm23
-#tensorRelu23
-#tensorDepthwiseConv12
-#tensorBatchNorm24
-#tensorRelu24
-Conv13,10000,512,2,2,1024,512,1,1
-#tensorBatchNorm25
-#tensorRelu25
-#tensorDepthwiseConv13
-#tensorBatchNorm26
-#tensorRelu26
-Conv14,10000,1024,2,2,1024,1024,1,1
-#tensorBatchNorm27
-#tensorRelu27
-#tensorPooling1
-FC1,10000,1024,1024,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/approxhpvm_src.cc
deleted file mode 100644
index 5089eb912bcb5335c96c04f6d98f5d17ab761c72..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/approxhpvm_src.cc
+++ /dev/null
@@ -1,2400 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 32); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 64); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_43_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_44_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_45_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_46_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_47_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_48_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_49_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_50_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_51_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_52_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_53_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_54_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_55_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_56_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_57_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_58_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_59_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_60_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_61_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_62_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_63_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_64_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_65_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_66_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_67_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_68_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_69_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_70_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_71_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_72_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_73_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_74_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_75_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 1024); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_76_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_77_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_78_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_79_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_80_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_81_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_avg(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_82_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_83_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_84_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* batch_normalization_1_gamma, size_t batch_normalization_1_gamma_bytes, 
-	  void* batch_normalization_1_beta, size_t batch_normalization_1_beta_bytes, 
-	  void* batch_normalization_1_mean, size_t batch_normalization_1_mean_bytes, 
-	  void* batch_normalization_1_variance, size_t batch_normalization_1_variance_bytes, 
-	  void* depthwise_conv2d_1_w, size_t depthwise_conv2d_1_w_bytes, 
-	  void* batch_normalization_2_gamma, size_t batch_normalization_2_gamma_bytes, 
-	  void* batch_normalization_2_beta, size_t batch_normalization_2_beta_bytes, 
-	  void* batch_normalization_2_mean, size_t batch_normalization_2_mean_bytes, 
-	  void* batch_normalization_2_variance, size_t batch_normalization_2_variance_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* batch_normalization_3_gamma, size_t batch_normalization_3_gamma_bytes, 
-	  void* batch_normalization_3_beta, size_t batch_normalization_3_beta_bytes, 
-	  void* batch_normalization_3_mean, size_t batch_normalization_3_mean_bytes, 
-	  void* batch_normalization_3_variance, size_t batch_normalization_3_variance_bytes, 
-	  void* depthwise_conv2d_2_w, size_t depthwise_conv2d_2_w_bytes, 
-	  void* batch_normalization_4_gamma, size_t batch_normalization_4_gamma_bytes, 
-	  void* batch_normalization_4_beta, size_t batch_normalization_4_beta_bytes, 
-	  void* batch_normalization_4_mean, size_t batch_normalization_4_mean_bytes, 
-	  void* batch_normalization_4_variance, size_t batch_normalization_4_variance_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* batch_normalization_5_gamma, size_t batch_normalization_5_gamma_bytes, 
-	  void* batch_normalization_5_beta, size_t batch_normalization_5_beta_bytes, 
-	  void* batch_normalization_5_mean, size_t batch_normalization_5_mean_bytes, 
-	  void* batch_normalization_5_variance, size_t batch_normalization_5_variance_bytes, 
-	  void* depthwise_conv2d_3_w, size_t depthwise_conv2d_3_w_bytes, 
-	  void* batch_normalization_6_gamma, size_t batch_normalization_6_gamma_bytes, 
-	  void* batch_normalization_6_beta, size_t batch_normalization_6_beta_bytes, 
-	  void* batch_normalization_6_mean, size_t batch_normalization_6_mean_bytes, 
-	  void* batch_normalization_6_variance, size_t batch_normalization_6_variance_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* batch_normalization_7_gamma, size_t batch_normalization_7_gamma_bytes, 
-	  void* batch_normalization_7_beta, size_t batch_normalization_7_beta_bytes, 
-	  void* batch_normalization_7_mean, size_t batch_normalization_7_mean_bytes, 
-	  void* batch_normalization_7_variance, size_t batch_normalization_7_variance_bytes, 
-	  void* depthwise_conv2d_4_w, size_t depthwise_conv2d_4_w_bytes, 
-	  void* batch_normalization_8_gamma, size_t batch_normalization_8_gamma_bytes, 
-	  void* batch_normalization_8_beta, size_t batch_normalization_8_beta_bytes, 
-	  void* batch_normalization_8_mean, size_t batch_normalization_8_mean_bytes, 
-	  void* batch_normalization_8_variance, size_t batch_normalization_8_variance_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* batch_normalization_9_gamma, size_t batch_normalization_9_gamma_bytes, 
-	  void* batch_normalization_9_beta, size_t batch_normalization_9_beta_bytes, 
-	  void* batch_normalization_9_mean, size_t batch_normalization_9_mean_bytes, 
-	  void* batch_normalization_9_variance, size_t batch_normalization_9_variance_bytes, 
-	  void* depthwise_conv2d_5_w, size_t depthwise_conv2d_5_w_bytes, 
-	  void* batch_normalization_10_gamma, size_t batch_normalization_10_gamma_bytes, 
-	  void* batch_normalization_10_beta, size_t batch_normalization_10_beta_bytes, 
-	  void* batch_normalization_10_mean, size_t batch_normalization_10_mean_bytes, 
-	  void* batch_normalization_10_variance, size_t batch_normalization_10_variance_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* batch_normalization_11_gamma, size_t batch_normalization_11_gamma_bytes, 
-	  void* batch_normalization_11_beta, size_t batch_normalization_11_beta_bytes, 
-	  void* batch_normalization_11_mean, size_t batch_normalization_11_mean_bytes, 
-	  void* batch_normalization_11_variance, size_t batch_normalization_11_variance_bytes, 
-	  void* depthwise_conv2d_6_w, size_t depthwise_conv2d_6_w_bytes, 
-	  void* batch_normalization_12_gamma, size_t batch_normalization_12_gamma_bytes, 
-	  void* batch_normalization_12_beta, size_t batch_normalization_12_beta_bytes, 
-	  void* batch_normalization_12_mean, size_t batch_normalization_12_mean_bytes, 
-	  void* batch_normalization_12_variance, size_t batch_normalization_12_variance_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* batch_normalization_13_gamma, size_t batch_normalization_13_gamma_bytes, 
-	  void* batch_normalization_13_beta, size_t batch_normalization_13_beta_bytes, 
-	  void* batch_normalization_13_mean, size_t batch_normalization_13_mean_bytes, 
-	  void* batch_normalization_13_variance, size_t batch_normalization_13_variance_bytes, 
-	  void* depthwise_conv2d_7_w, size_t depthwise_conv2d_7_w_bytes, 
-	  void* batch_normalization_14_gamma, size_t batch_normalization_14_gamma_bytes, 
-	  void* batch_normalization_14_beta, size_t batch_normalization_14_beta_bytes, 
-	  void* batch_normalization_14_mean, size_t batch_normalization_14_mean_bytes, 
-	  void* batch_normalization_14_variance, size_t batch_normalization_14_variance_bytes, 
-	  void* conv2d_8_w, size_t conv2d_8_w_bytes, 
-	  void* batch_normalization_15_gamma, size_t batch_normalization_15_gamma_bytes, 
-	  void* batch_normalization_15_beta, size_t batch_normalization_15_beta_bytes, 
-	  void* batch_normalization_15_mean, size_t batch_normalization_15_mean_bytes, 
-	  void* batch_normalization_15_variance, size_t batch_normalization_15_variance_bytes, 
-	  void* depthwise_conv2d_8_w, size_t depthwise_conv2d_8_w_bytes, 
-	  void* batch_normalization_16_gamma, size_t batch_normalization_16_gamma_bytes, 
-	  void* batch_normalization_16_beta, size_t batch_normalization_16_beta_bytes, 
-	  void* batch_normalization_16_mean, size_t batch_normalization_16_mean_bytes, 
-	  void* batch_normalization_16_variance, size_t batch_normalization_16_variance_bytes, 
-	  void* conv2d_9_w, size_t conv2d_9_w_bytes, 
-	  void* batch_normalization_17_gamma, size_t batch_normalization_17_gamma_bytes, 
-	  void* batch_normalization_17_beta, size_t batch_normalization_17_beta_bytes, 
-	  void* batch_normalization_17_mean, size_t batch_normalization_17_mean_bytes, 
-	  void* batch_normalization_17_variance, size_t batch_normalization_17_variance_bytes, 
-	  void* depthwise_conv2d_9_w, size_t depthwise_conv2d_9_w_bytes, 
-	  void* batch_normalization_18_gamma, size_t batch_normalization_18_gamma_bytes, 
-	  void* batch_normalization_18_beta, size_t batch_normalization_18_beta_bytes, 
-	  void* batch_normalization_18_mean, size_t batch_normalization_18_mean_bytes, 
-	  void* batch_normalization_18_variance, size_t batch_normalization_18_variance_bytes, 
-	  void* conv2d_10_w, size_t conv2d_10_w_bytes, 
-	  void* batch_normalization_19_gamma, size_t batch_normalization_19_gamma_bytes, 
-	  void* batch_normalization_19_beta, size_t batch_normalization_19_beta_bytes, 
-	  void* batch_normalization_19_mean, size_t batch_normalization_19_mean_bytes, 
-	  void* batch_normalization_19_variance, size_t batch_normalization_19_variance_bytes, 
-	  void* depthwise_conv2d_10_w, size_t depthwise_conv2d_10_w_bytes, 
-	  void* batch_normalization_20_gamma, size_t batch_normalization_20_gamma_bytes, 
-	  void* batch_normalization_20_beta, size_t batch_normalization_20_beta_bytes, 
-	  void* batch_normalization_20_mean, size_t batch_normalization_20_mean_bytes, 
-	  void* batch_normalization_20_variance, size_t batch_normalization_20_variance_bytes, 
-	  void* conv2d_11_w, size_t conv2d_11_w_bytes, 
-	  void* batch_normalization_21_gamma, size_t batch_normalization_21_gamma_bytes, 
-	  void* batch_normalization_21_beta, size_t batch_normalization_21_beta_bytes, 
-	  void* batch_normalization_21_mean, size_t batch_normalization_21_mean_bytes, 
-	  void* batch_normalization_21_variance, size_t batch_normalization_21_variance_bytes, 
-	  void* depthwise_conv2d_11_w, size_t depthwise_conv2d_11_w_bytes, 
-	  void* batch_normalization_22_gamma, size_t batch_normalization_22_gamma_bytes, 
-	  void* batch_normalization_22_beta, size_t batch_normalization_22_beta_bytes, 
-	  void* batch_normalization_22_mean, size_t batch_normalization_22_mean_bytes, 
-	  void* batch_normalization_22_variance, size_t batch_normalization_22_variance_bytes, 
-	  void* conv2d_12_w, size_t conv2d_12_w_bytes, 
-	  void* batch_normalization_23_gamma, size_t batch_normalization_23_gamma_bytes, 
-	  void* batch_normalization_23_beta, size_t batch_normalization_23_beta_bytes, 
-	  void* batch_normalization_23_mean, size_t batch_normalization_23_mean_bytes, 
-	  void* batch_normalization_23_variance, size_t batch_normalization_23_variance_bytes, 
-	  void* depthwise_conv2d_12_w, size_t depthwise_conv2d_12_w_bytes, 
-	  void* batch_normalization_24_gamma, size_t batch_normalization_24_gamma_bytes, 
-	  void* batch_normalization_24_beta, size_t batch_normalization_24_beta_bytes, 
-	  void* batch_normalization_24_mean, size_t batch_normalization_24_mean_bytes, 
-	  void* batch_normalization_24_variance, size_t batch_normalization_24_variance_bytes, 
-	  void* conv2d_13_w, size_t conv2d_13_w_bytes, 
-	  void* batch_normalization_25_gamma, size_t batch_normalization_25_gamma_bytes, 
-	  void* batch_normalization_25_beta, size_t batch_normalization_25_beta_bytes, 
-	  void* batch_normalization_25_mean, size_t batch_normalization_25_mean_bytes, 
-	  void* batch_normalization_25_variance, size_t batch_normalization_25_variance_bytes, 
-	  void* depthwise_conv2d_13_w, size_t depthwise_conv2d_13_w_bytes, 
-	  void* batch_normalization_26_gamma, size_t batch_normalization_26_gamma_bytes, 
-	  void* batch_normalization_26_beta, size_t batch_normalization_26_beta_bytes, 
-	  void* batch_normalization_26_mean, size_t batch_normalization_26_mean_bytes, 
-	  void* batch_normalization_26_variance, size_t batch_normalization_26_variance_bytes, 
-	  void* conv2d_14_w, size_t conv2d_14_w_bytes, 
-	  void* batch_normalization_27_gamma, size_t batch_normalization_27_gamma_bytes, 
-	  void* batch_normalization_27_beta, size_t batch_normalization_27_beta_bytes, 
-	  void* batch_normalization_27_mean, size_t batch_normalization_27_mean_bytes, 
-	  void* batch_normalization_27_variance, size_t batch_normalization_27_variance_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(138, input, conv2d_1_w, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, depthwise_conv2d_1_w, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, conv2d_2_w, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, depthwise_conv2d_2_w, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, conv2d_3_w, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, depthwise_conv2d_3_w, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, conv2d_4_w, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, depthwise_conv2d_4_w, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, conv2d_5_w, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, depthwise_conv2d_5_w, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, conv2d_6_w, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, depthwise_conv2d_6_w, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, conv2d_7_w, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, depthwise_conv2d_7_w, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, conv2d_8_w, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, depthwise_conv2d_8_w, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, conv2d_9_w, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, depthwise_conv2d_9_w, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, conv2d_10_w, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, depthwise_conv2d_10_w, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, conv2d_11_w, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, depthwise_conv2d_11_w, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, conv2d_12_w, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, depthwise_conv2d_12_w, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, conv2d_13_w, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, depthwise_conv2d_13_w, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, conv2d_14_w, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-  __visc__bindIn(var_1, 6, 4, 0); 
-  __visc__bindIn(var_1, 7, 5, 0); 
-  __visc__bindIn(var_1, 8, 6, 0); 
-  __visc__bindIn(var_1, 9, 7, 0); 
-  __visc__bindIn(var_1, 10, 8, 0); 
-  __visc__bindIn(var_1, 11, 9, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 12, 2, 0); 
-  __visc__bindIn(var_3, 13, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 14, 2, 0); 
-  __visc__bindIn(var_4, 15, 3, 0); 
-  __visc__bindIn(var_4, 16, 4, 0); 
-  __visc__bindIn(var_4, 17, 5, 0); 
-  __visc__bindIn(var_4, 18, 6, 0); 
-  __visc__bindIn(var_4, 19, 7, 0); 
-  __visc__bindIn(var_4, 20, 8, 0); 
-  __visc__bindIn(var_4, 21, 9, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-  __visc__bindIn(var_6, 22, 2, 0); 
-  __visc__bindIn(var_6, 23, 3, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 24, 2, 0); 
-  __visc__bindIn(var_7, 25, 3, 0); 
-  __visc__bindIn(var_7, 26, 4, 0); 
-  __visc__bindIn(var_7, 27, 5, 0); 
-  __visc__bindIn(var_7, 28, 6, 0); 
-  __visc__bindIn(var_7, 29, 7, 0); 
-  __visc__bindIn(var_7, 30, 8, 0); 
-  __visc__bindIn(var_7, 31, 9, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 32, 2, 0); 
-  __visc__bindIn(var_9, 33, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 34, 2, 0); 
-  __visc__bindIn(var_10, 35, 3, 0); 
-  __visc__bindIn(var_10, 36, 4, 0); 
-  __visc__bindIn(var_10, 37, 5, 0); 
-  __visc__bindIn(var_10, 38, 6, 0); 
-  __visc__bindIn(var_10, 39, 7, 0); 
-  __visc__bindIn(var_10, 40, 8, 0); 
-  __visc__bindIn(var_10, 41, 9, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 42, 2, 0); 
-  __visc__bindIn(var_12, 43, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-  __visc__bindIn(var_13, 44, 2, 0); 
-  __visc__bindIn(var_13, 45, 3, 0); 
-  __visc__bindIn(var_13, 46, 4, 0); 
-  __visc__bindIn(var_13, 47, 5, 0); 
-  __visc__bindIn(var_13, 48, 6, 0); 
-  __visc__bindIn(var_13, 49, 7, 0); 
-  __visc__bindIn(var_13, 50, 8, 0); 
-  __visc__bindIn(var_13, 51, 9, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 52, 2, 0); 
-  __visc__bindIn(var_15, 53, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-  __visc__bindIn(var_16, 54, 2, 0); 
-  __visc__bindIn(var_16, 55, 3, 0); 
-  __visc__bindIn(var_16, 56, 4, 0); 
-  __visc__bindIn(var_16, 57, 5, 0); 
-  __visc__bindIn(var_16, 58, 6, 0); 
-  __visc__bindIn(var_16, 59, 7, 0); 
-  __visc__bindIn(var_16, 60, 8, 0); 
-  __visc__bindIn(var_16, 61, 9, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 62, 2, 0); 
-  __visc__bindIn(var_18, 63, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-  __visc__bindIn(var_19, 64, 2, 0); 
-  __visc__bindIn(var_19, 65, 3, 0); 
-  __visc__bindIn(var_19, 66, 4, 0); 
-  __visc__bindIn(var_19, 67, 5, 0); 
-  __visc__bindIn(var_19, 68, 6, 0); 
-  __visc__bindIn(var_19, 69, 7, 0); 
-  __visc__bindIn(var_19, 70, 8, 0); 
-  __visc__bindIn(var_19, 71, 9, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 72, 2, 0); 
-  __visc__bindIn(var_21, 73, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-  __visc__bindIn(var_22, 74, 2, 0); 
-  __visc__bindIn(var_22, 75, 3, 0); 
-  __visc__bindIn(var_22, 76, 4, 0); 
-  __visc__bindIn(var_22, 77, 5, 0); 
-  __visc__bindIn(var_22, 78, 6, 0); 
-  __visc__bindIn(var_22, 79, 7, 0); 
-  __visc__bindIn(var_22, 80, 8, 0); 
-  __visc__bindIn(var_22, 81, 9, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 82, 2, 0); 
-  __visc__bindIn(var_24, 83, 3, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-  __visc__bindIn(var_25, 84, 2, 0); 
-  __visc__bindIn(var_25, 85, 3, 0); 
-  __visc__bindIn(var_25, 86, 4, 0); 
-  __visc__bindIn(var_25, 87, 5, 0); 
-  __visc__bindIn(var_25, 88, 6, 0); 
-  __visc__bindIn(var_25, 89, 7, 0); 
-  __visc__bindIn(var_25, 90, 8, 0); 
-  __visc__bindIn(var_25, 91, 9, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 92, 2, 0); 
-  __visc__bindIn(var_27, 93, 3, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-  __visc__bindIn(var_28, 94, 2, 0); 
-  __visc__bindIn(var_28, 95, 3, 0); 
-  __visc__bindIn(var_28, 96, 4, 0); 
-  __visc__bindIn(var_28, 97, 5, 0); 
-  __visc__bindIn(var_28, 98, 6, 0); 
-  __visc__bindIn(var_28, 99, 7, 0); 
-  __visc__bindIn(var_28, 100, 8, 0); 
-  __visc__bindIn(var_28, 101, 9, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 102, 2, 0); 
-  __visc__bindIn(var_30, 103, 3, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 104, 2, 0); 
-  __visc__bindIn(var_31, 105, 3, 0); 
-  __visc__bindIn(var_31, 106, 4, 0); 
-  __visc__bindIn(var_31, 107, 5, 0); 
-  __visc__bindIn(var_31, 108, 6, 0); 
-  __visc__bindIn(var_31, 109, 7, 0); 
-  __visc__bindIn(var_31, 110, 8, 0); 
-  __visc__bindIn(var_31, 111, 9, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-  __visc__bindIn(var_33, 112, 2, 0); 
-  __visc__bindIn(var_33, 113, 3, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 114, 2, 0); 
-  __visc__bindIn(var_34, 115, 3, 0); 
-  __visc__bindIn(var_34, 116, 4, 0); 
-  __visc__bindIn(var_34, 117, 5, 0); 
-  __visc__bindIn(var_34, 118, 6, 0); 
-  __visc__bindIn(var_34, 119, 7, 0); 
-  __visc__bindIn(var_34, 120, 8, 0); 
-  __visc__bindIn(var_34, 121, 9, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-  __visc__bindIn(var_36, 122, 2, 0); 
-  __visc__bindIn(var_36, 123, 3, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 124, 2, 0); 
-  __visc__bindIn(var_37, 125, 3, 0); 
-  __visc__bindIn(var_37, 126, 4, 0); 
-  __visc__bindIn(var_37, 127, 5, 0); 
-  __visc__bindIn(var_37, 128, 6, 0); 
-  __visc__bindIn(var_37, 129, 7, 0); 
-  __visc__bindIn(var_37, 130, 8, 0); 
-  __visc__bindIn(var_37, 131, 9, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_38, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 1, 0); 
-  __visc__bindIn(var_39, 132, 2, 0); 
-  __visc__bindIn(var_39, 133, 3, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 134, 2, 0); 
-  __visc__bindIn(var_40, 135, 3, 0); 
-  __visc__bindIn(var_40, 136, 4, 0); 
-  __visc__bindIn(var_40, 137, 5, 0); 
-  __visc__bindIn(var_40, 138, 6, 0); 
-  __visc__bindIn(var_40, 139, 7, 0); 
-  __visc__bindIn(var_40, 140, 8, 0); 
-  __visc__bindIn(var_40, 141, 9, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-  __visc__bindIn(var_42, 142, 2, 0); 
-  __visc__bindIn(var_42, 143, 3, 0); 
-
-  void* var_43 = __visc__createNodeND(0, var_43_node); 
-
-  __visc__edge(var_42, var_43, 1, 0, 0, 0); 
-  __visc__edge(var_42, var_43, 1, 1, 1, 0); 
-  __visc__bindIn(var_43, 144, 2, 0); 
-  __visc__bindIn(var_43, 145, 3, 0); 
-  __visc__bindIn(var_43, 146, 4, 0); 
-  __visc__bindIn(var_43, 147, 5, 0); 
-  __visc__bindIn(var_43, 148, 6, 0); 
-  __visc__bindIn(var_43, 149, 7, 0); 
-  __visc__bindIn(var_43, 150, 8, 0); 
-  __visc__bindIn(var_43, 151, 9, 0); 
-
-  void* var_44 = __visc__createNodeND(0, var_44_node); 
-
-  __visc__edge(var_43, var_44, 1, 0, 0, 0); 
-  __visc__edge(var_43, var_44, 1, 1, 1, 0); 
-
-  void* var_45 = __visc__createNodeND(0, var_45_node); 
-
-  __visc__edge(var_44, var_45, 1, 0, 0, 0); 
-  __visc__edge(var_44, var_45, 1, 1, 1, 0); 
-  __visc__bindIn(var_45, 152, 2, 0); 
-  __visc__bindIn(var_45, 153, 3, 0); 
-
-  void* var_46 = __visc__createNodeND(0, var_46_node); 
-
-  __visc__edge(var_45, var_46, 1, 0, 0, 0); 
-  __visc__edge(var_45, var_46, 1, 1, 1, 0); 
-  __visc__bindIn(var_46, 154, 2, 0); 
-  __visc__bindIn(var_46, 155, 3, 0); 
-  __visc__bindIn(var_46, 156, 4, 0); 
-  __visc__bindIn(var_46, 157, 5, 0); 
-  __visc__bindIn(var_46, 158, 6, 0); 
-  __visc__bindIn(var_46, 159, 7, 0); 
-  __visc__bindIn(var_46, 160, 8, 0); 
-  __visc__bindIn(var_46, 161, 9, 0); 
-
-  void* var_47 = __visc__createNodeND(0, var_47_node); 
-
-  __visc__edge(var_46, var_47, 1, 0, 0, 0); 
-  __visc__edge(var_46, var_47, 1, 1, 1, 0); 
-
-  void* var_48 = __visc__createNodeND(0, var_48_node); 
-
-  __visc__edge(var_47, var_48, 1, 0, 0, 0); 
-  __visc__edge(var_47, var_48, 1, 1, 1, 0); 
-  __visc__bindIn(var_48, 162, 2, 0); 
-  __visc__bindIn(var_48, 163, 3, 0); 
-
-  void* var_49 = __visc__createNodeND(0, var_49_node); 
-
-  __visc__edge(var_48, var_49, 1, 0, 0, 0); 
-  __visc__edge(var_48, var_49, 1, 1, 1, 0); 
-  __visc__bindIn(var_49, 164, 2, 0); 
-  __visc__bindIn(var_49, 165, 3, 0); 
-  __visc__bindIn(var_49, 166, 4, 0); 
-  __visc__bindIn(var_49, 167, 5, 0); 
-  __visc__bindIn(var_49, 168, 6, 0); 
-  __visc__bindIn(var_49, 169, 7, 0); 
-  __visc__bindIn(var_49, 170, 8, 0); 
-  __visc__bindIn(var_49, 171, 9, 0); 
-
-  void* var_50 = __visc__createNodeND(0, var_50_node); 
-
-  __visc__edge(var_49, var_50, 1, 0, 0, 0); 
-  __visc__edge(var_49, var_50, 1, 1, 1, 0); 
-
-  void* var_51 = __visc__createNodeND(0, var_51_node); 
-
-  __visc__edge(var_50, var_51, 1, 0, 0, 0); 
-  __visc__edge(var_50, var_51, 1, 1, 1, 0); 
-  __visc__bindIn(var_51, 172, 2, 0); 
-  __visc__bindIn(var_51, 173, 3, 0); 
-
-  void* var_52 = __visc__createNodeND(0, var_52_node); 
-
-  __visc__edge(var_51, var_52, 1, 0, 0, 0); 
-  __visc__edge(var_51, var_52, 1, 1, 1, 0); 
-  __visc__bindIn(var_52, 174, 2, 0); 
-  __visc__bindIn(var_52, 175, 3, 0); 
-  __visc__bindIn(var_52, 176, 4, 0); 
-  __visc__bindIn(var_52, 177, 5, 0); 
-  __visc__bindIn(var_52, 178, 6, 0); 
-  __visc__bindIn(var_52, 179, 7, 0); 
-  __visc__bindIn(var_52, 180, 8, 0); 
-  __visc__bindIn(var_52, 181, 9, 0); 
-
-  void* var_53 = __visc__createNodeND(0, var_53_node); 
-
-  __visc__edge(var_52, var_53, 1, 0, 0, 0); 
-  __visc__edge(var_52, var_53, 1, 1, 1, 0); 
-
-  void* var_54 = __visc__createNodeND(0, var_54_node); 
-
-  __visc__edge(var_53, var_54, 1, 0, 0, 0); 
-  __visc__edge(var_53, var_54, 1, 1, 1, 0); 
-  __visc__bindIn(var_54, 182, 2, 0); 
-  __visc__bindIn(var_54, 183, 3, 0); 
-
-  void* var_55 = __visc__createNodeND(0, var_55_node); 
-
-  __visc__edge(var_54, var_55, 1, 0, 0, 0); 
-  __visc__edge(var_54, var_55, 1, 1, 1, 0); 
-  __visc__bindIn(var_55, 184, 2, 0); 
-  __visc__bindIn(var_55, 185, 3, 0); 
-  __visc__bindIn(var_55, 186, 4, 0); 
-  __visc__bindIn(var_55, 187, 5, 0); 
-  __visc__bindIn(var_55, 188, 6, 0); 
-  __visc__bindIn(var_55, 189, 7, 0); 
-  __visc__bindIn(var_55, 190, 8, 0); 
-  __visc__bindIn(var_55, 191, 9, 0); 
-
-  void* var_56 = __visc__createNodeND(0, var_56_node); 
-
-  __visc__edge(var_55, var_56, 1, 0, 0, 0); 
-  __visc__edge(var_55, var_56, 1, 1, 1, 0); 
-
-  void* var_57 = __visc__createNodeND(0, var_57_node); 
-
-  __visc__edge(var_56, var_57, 1, 0, 0, 0); 
-  __visc__edge(var_56, var_57, 1, 1, 1, 0); 
-  __visc__bindIn(var_57, 192, 2, 0); 
-  __visc__bindIn(var_57, 193, 3, 0); 
-
-  void* var_58 = __visc__createNodeND(0, var_58_node); 
-
-  __visc__edge(var_57, var_58, 1, 0, 0, 0); 
-  __visc__edge(var_57, var_58, 1, 1, 1, 0); 
-  __visc__bindIn(var_58, 194, 2, 0); 
-  __visc__bindIn(var_58, 195, 3, 0); 
-  __visc__bindIn(var_58, 196, 4, 0); 
-  __visc__bindIn(var_58, 197, 5, 0); 
-  __visc__bindIn(var_58, 198, 6, 0); 
-  __visc__bindIn(var_58, 199, 7, 0); 
-  __visc__bindIn(var_58, 200, 8, 0); 
-  __visc__bindIn(var_58, 201, 9, 0); 
-
-  void* var_59 = __visc__createNodeND(0, var_59_node); 
-
-  __visc__edge(var_58, var_59, 1, 0, 0, 0); 
-  __visc__edge(var_58, var_59, 1, 1, 1, 0); 
-
-  void* var_60 = __visc__createNodeND(0, var_60_node); 
-
-  __visc__edge(var_59, var_60, 1, 0, 0, 0); 
-  __visc__edge(var_59, var_60, 1, 1, 1, 0); 
-  __visc__bindIn(var_60, 202, 2, 0); 
-  __visc__bindIn(var_60, 203, 3, 0); 
-
-  void* var_61 = __visc__createNodeND(0, var_61_node); 
-
-  __visc__edge(var_60, var_61, 1, 0, 0, 0); 
-  __visc__edge(var_60, var_61, 1, 1, 1, 0); 
-  __visc__bindIn(var_61, 204, 2, 0); 
-  __visc__bindIn(var_61, 205, 3, 0); 
-  __visc__bindIn(var_61, 206, 4, 0); 
-  __visc__bindIn(var_61, 207, 5, 0); 
-  __visc__bindIn(var_61, 208, 6, 0); 
-  __visc__bindIn(var_61, 209, 7, 0); 
-  __visc__bindIn(var_61, 210, 8, 0); 
-  __visc__bindIn(var_61, 211, 9, 0); 
-
-  void* var_62 = __visc__createNodeND(0, var_62_node); 
-
-  __visc__edge(var_61, var_62, 1, 0, 0, 0); 
-  __visc__edge(var_61, var_62, 1, 1, 1, 0); 
-
-  void* var_63 = __visc__createNodeND(0, var_63_node); 
-
-  __visc__edge(var_62, var_63, 1, 0, 0, 0); 
-  __visc__edge(var_62, var_63, 1, 1, 1, 0); 
-  __visc__bindIn(var_63, 212, 2, 0); 
-  __visc__bindIn(var_63, 213, 3, 0); 
-
-  void* var_64 = __visc__createNodeND(0, var_64_node); 
-
-  __visc__edge(var_63, var_64, 1, 0, 0, 0); 
-  __visc__edge(var_63, var_64, 1, 1, 1, 0); 
-  __visc__bindIn(var_64, 214, 2, 0); 
-  __visc__bindIn(var_64, 215, 3, 0); 
-  __visc__bindIn(var_64, 216, 4, 0); 
-  __visc__bindIn(var_64, 217, 5, 0); 
-  __visc__bindIn(var_64, 218, 6, 0); 
-  __visc__bindIn(var_64, 219, 7, 0); 
-  __visc__bindIn(var_64, 220, 8, 0); 
-  __visc__bindIn(var_64, 221, 9, 0); 
-
-  void* var_65 = __visc__createNodeND(0, var_65_node); 
-
-  __visc__edge(var_64, var_65, 1, 0, 0, 0); 
-  __visc__edge(var_64, var_65, 1, 1, 1, 0); 
-
-  void* var_66 = __visc__createNodeND(0, var_66_node); 
-
-  __visc__edge(var_65, var_66, 1, 0, 0, 0); 
-  __visc__edge(var_65, var_66, 1, 1, 1, 0); 
-  __visc__bindIn(var_66, 222, 2, 0); 
-  __visc__bindIn(var_66, 223, 3, 0); 
-
-  void* var_67 = __visc__createNodeND(0, var_67_node); 
-
-  __visc__edge(var_66, var_67, 1, 0, 0, 0); 
-  __visc__edge(var_66, var_67, 1, 1, 1, 0); 
-  __visc__bindIn(var_67, 224, 2, 0); 
-  __visc__bindIn(var_67, 225, 3, 0); 
-  __visc__bindIn(var_67, 226, 4, 0); 
-  __visc__bindIn(var_67, 227, 5, 0); 
-  __visc__bindIn(var_67, 228, 6, 0); 
-  __visc__bindIn(var_67, 229, 7, 0); 
-  __visc__bindIn(var_67, 230, 8, 0); 
-  __visc__bindIn(var_67, 231, 9, 0); 
-
-  void* var_68 = __visc__createNodeND(0, var_68_node); 
-
-  __visc__edge(var_67, var_68, 1, 0, 0, 0); 
-  __visc__edge(var_67, var_68, 1, 1, 1, 0); 
-
-  void* var_69 = __visc__createNodeND(0, var_69_node); 
-
-  __visc__edge(var_68, var_69, 1, 0, 0, 0); 
-  __visc__edge(var_68, var_69, 1, 1, 1, 0); 
-  __visc__bindIn(var_69, 232, 2, 0); 
-  __visc__bindIn(var_69, 233, 3, 0); 
-
-  void* var_70 = __visc__createNodeND(0, var_70_node); 
-
-  __visc__edge(var_69, var_70, 1, 0, 0, 0); 
-  __visc__edge(var_69, var_70, 1, 1, 1, 0); 
-  __visc__bindIn(var_70, 234, 2, 0); 
-  __visc__bindIn(var_70, 235, 3, 0); 
-  __visc__bindIn(var_70, 236, 4, 0); 
-  __visc__bindIn(var_70, 237, 5, 0); 
-  __visc__bindIn(var_70, 238, 6, 0); 
-  __visc__bindIn(var_70, 239, 7, 0); 
-  __visc__bindIn(var_70, 240, 8, 0); 
-  __visc__bindIn(var_70, 241, 9, 0); 
-
-  void* var_71 = __visc__createNodeND(0, var_71_node); 
-
-  __visc__edge(var_70, var_71, 1, 0, 0, 0); 
-  __visc__edge(var_70, var_71, 1, 1, 1, 0); 
-
-  void* var_72 = __visc__createNodeND(0, var_72_node); 
-
-  __visc__edge(var_71, var_72, 1, 0, 0, 0); 
-  __visc__edge(var_71, var_72, 1, 1, 1, 0); 
-  __visc__bindIn(var_72, 242, 2, 0); 
-  __visc__bindIn(var_72, 243, 3, 0); 
-
-  void* var_73 = __visc__createNodeND(0, var_73_node); 
-
-  __visc__edge(var_72, var_73, 1, 0, 0, 0); 
-  __visc__edge(var_72, var_73, 1, 1, 1, 0); 
-  __visc__bindIn(var_73, 244, 2, 0); 
-  __visc__bindIn(var_73, 245, 3, 0); 
-  __visc__bindIn(var_73, 246, 4, 0); 
-  __visc__bindIn(var_73, 247, 5, 0); 
-  __visc__bindIn(var_73, 248, 6, 0); 
-  __visc__bindIn(var_73, 249, 7, 0); 
-  __visc__bindIn(var_73, 250, 8, 0); 
-  __visc__bindIn(var_73, 251, 9, 0); 
-
-  void* var_74 = __visc__createNodeND(0, var_74_node); 
-
-  __visc__edge(var_73, var_74, 1, 0, 0, 0); 
-  __visc__edge(var_73, var_74, 1, 1, 1, 0); 
-
-  void* var_75 = __visc__createNodeND(0, var_75_node); 
-
-  __visc__edge(var_74, var_75, 1, 0, 0, 0); 
-  __visc__edge(var_74, var_75, 1, 1, 1, 0); 
-  __visc__bindIn(var_75, 252, 2, 0); 
-  __visc__bindIn(var_75, 253, 3, 0); 
-
-  void* var_76 = __visc__createNodeND(0, var_76_node); 
-
-  __visc__edge(var_75, var_76, 1, 0, 0, 0); 
-  __visc__edge(var_75, var_76, 1, 1, 1, 0); 
-  __visc__bindIn(var_76, 254, 2, 0); 
-  __visc__bindIn(var_76, 255, 3, 0); 
-  __visc__bindIn(var_76, 256, 4, 0); 
-  __visc__bindIn(var_76, 257, 5, 0); 
-  __visc__bindIn(var_76, 258, 6, 0); 
-  __visc__bindIn(var_76, 259, 7, 0); 
-  __visc__bindIn(var_76, 260, 8, 0); 
-  __visc__bindIn(var_76, 261, 9, 0); 
-
-  void* var_77 = __visc__createNodeND(0, var_77_node); 
-
-  __visc__edge(var_76, var_77, 1, 0, 0, 0); 
-  __visc__edge(var_76, var_77, 1, 1, 1, 0); 
-
-  void* var_78 = __visc__createNodeND(0, var_78_node); 
-
-  __visc__edge(var_77, var_78, 1, 0, 0, 0); 
-  __visc__edge(var_77, var_78, 1, 1, 1, 0); 
-  __visc__bindIn(var_78, 262, 2, 0); 
-  __visc__bindIn(var_78, 263, 3, 0); 
-
-  void* var_79 = __visc__createNodeND(0, var_79_node); 
-
-  __visc__edge(var_78, var_79, 1, 0, 0, 0); 
-  __visc__edge(var_78, var_79, 1, 1, 1, 0); 
-  __visc__bindIn(var_79, 264, 2, 0); 
-  __visc__bindIn(var_79, 265, 3, 0); 
-  __visc__bindIn(var_79, 266, 4, 0); 
-  __visc__bindIn(var_79, 267, 5, 0); 
-  __visc__bindIn(var_79, 268, 6, 0); 
-  __visc__bindIn(var_79, 269, 7, 0); 
-  __visc__bindIn(var_79, 270, 8, 0); 
-  __visc__bindIn(var_79, 271, 9, 0); 
-
-  void* var_80 = __visc__createNodeND(0, var_80_node); 
-
-  __visc__edge(var_79, var_80, 1, 0, 0, 0); 
-  __visc__edge(var_79, var_80, 1, 1, 1, 0); 
-
-  void* var_81 = __visc__createNodeND(0, var_81_node); 
-
-  __visc__edge(var_80, var_81, 1, 0, 0, 0); 
-  __visc__edge(var_80, var_81, 1, 1, 1, 0); 
-
-  void* var_82 = __visc__createNodeND(0, var_82_node); 
-
-  __visc__edge(var_81, var_82, 1, 0, 0, 0); 
-  __visc__edge(var_81, var_82, 1, 1, 1, 0); 
-  __visc__bindIn(var_82, 272, 2, 0); 
-  __visc__bindIn(var_82, 273, 3, 0); 
-
-  void* var_83 = __visc__createNodeND(0, var_83_node); 
-
-  __visc__edge(var_82, var_83, 1, 0, 0, 0); 
-  __visc__edge(var_82, var_83, 1, 1, 1, 0); 
-  __visc__bindIn(var_83, 274, 2, 0); 
-  __visc__bindIn(var_83, 275, 3, 0); 
-
-  void* var_84 = __visc__createNodeND(0, var_84_node); 
-
-  __visc__edge(var_83, var_84, 1, 0, 0, 0); 
-  __visc__edge(var_83, var_84, 1, 1, 1, 0); 
-
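-  // Expose the final node's (tensor, bytes) pair as the root graph's outputs.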
-  __visc__bindOut(var_84, 0, 0, 0); 
-  __visc__bindOut(var_84, 1, 1, 0); 
-
-}
-
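-// A (tensor pointer, byte size) pair, as returned by each node function.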
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
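-// Packed argument record for the root node: one (pointer, byte-size) pair per
-// network input and trained parameter, in the order bound above via
-// __visc__bindIn, followed by space for the root's return value.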
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* batch_normalization_1_gamma; 
-  size_t batch_normalization_1_gamma_bytes; 
-  void* batch_normalization_1_beta; 
-  size_t batch_normalization_1_beta_bytes; 
-  void* batch_normalization_1_mean; 
-  size_t batch_normalization_1_mean_bytes; 
-  void* batch_normalization_1_variance; 
-  size_t batch_normalization_1_variance_bytes; 
-  void* depthwise_conv2d_1_w; 
-  size_t depthwise_conv2d_1_w_bytes; 
-  void* batch_normalization_2_gamma; 
-  size_t batch_normalization_2_gamma_bytes; 
-  void* batch_normalization_2_beta; 
-  size_t batch_normalization_2_beta_bytes; 
-  void* batch_normalization_2_mean; 
-  size_t batch_normalization_2_mean_bytes; 
-  void* batch_normalization_2_variance; 
-  size_t batch_normalization_2_variance_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* batch_normalization_3_gamma; 
-  size_t batch_normalization_3_gamma_bytes; 
-  void* batch_normalization_3_beta; 
-  size_t batch_normalization_3_beta_bytes; 
-  void* batch_normalization_3_mean; 
-  size_t batch_normalization_3_mean_bytes; 
-  void* batch_normalization_3_variance; 
-  size_t batch_normalization_3_variance_bytes; 
-  void* depthwise_conv2d_2_w; 
-  size_t depthwise_conv2d_2_w_bytes; 
-  void* batch_normalization_4_gamma; 
-  size_t batch_normalization_4_gamma_bytes; 
-  void* batch_normalization_4_beta; 
-  size_t batch_normalization_4_beta_bytes; 
-  void* batch_normalization_4_mean; 
-  size_t batch_normalization_4_mean_bytes; 
-  void* batch_normalization_4_variance; 
-  size_t batch_normalization_4_variance_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* batch_normalization_5_gamma; 
-  size_t batch_normalization_5_gamma_bytes; 
-  void* batch_normalization_5_beta; 
-  size_t batch_normalization_5_beta_bytes; 
-  void* batch_normalization_5_mean; 
-  size_t batch_normalization_5_mean_bytes; 
-  void* batch_normalization_5_variance; 
-  size_t batch_normalization_5_variance_bytes; 
-  void* depthwise_conv2d_3_w; 
-  size_t depthwise_conv2d_3_w_bytes; 
-  void* batch_normalization_6_gamma; 
-  size_t batch_normalization_6_gamma_bytes; 
-  void* batch_normalization_6_beta; 
-  size_t batch_normalization_6_beta_bytes; 
-  void* batch_normalization_6_mean; 
-  size_t batch_normalization_6_mean_bytes; 
-  void* batch_normalization_6_variance; 
-  size_t batch_normalization_6_variance_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* batch_normalization_7_gamma; 
-  size_t batch_normalization_7_gamma_bytes; 
-  void* batch_normalization_7_beta; 
-  size_t batch_normalization_7_beta_bytes; 
-  void* batch_normalization_7_mean; 
-  size_t batch_normalization_7_mean_bytes; 
-  void* batch_normalization_7_variance; 
-  size_t batch_normalization_7_variance_bytes; 
-  void* depthwise_conv2d_4_w; 
-  size_t depthwise_conv2d_4_w_bytes; 
-  void* batch_normalization_8_gamma; 
-  size_t batch_normalization_8_gamma_bytes; 
-  void* batch_normalization_8_beta; 
-  size_t batch_normalization_8_beta_bytes; 
-  void* batch_normalization_8_mean; 
-  size_t batch_normalization_8_mean_bytes; 
-  void* batch_normalization_8_variance; 
-  size_t batch_normalization_8_variance_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* batch_normalization_9_gamma; 
-  size_t batch_normalization_9_gamma_bytes; 
-  void* batch_normalization_9_beta; 
-  size_t batch_normalization_9_beta_bytes; 
-  void* batch_normalization_9_mean; 
-  size_t batch_normalization_9_mean_bytes; 
-  void* batch_normalization_9_variance; 
-  size_t batch_normalization_9_variance_bytes; 
-  void* depthwise_conv2d_5_w; 
-  size_t depthwise_conv2d_5_w_bytes; 
-  void* batch_normalization_10_gamma; 
-  size_t batch_normalization_10_gamma_bytes; 
-  void* batch_normalization_10_beta; 
-  size_t batch_normalization_10_beta_bytes; 
-  void* batch_normalization_10_mean; 
-  size_t batch_normalization_10_mean_bytes; 
-  void* batch_normalization_10_variance; 
-  size_t batch_normalization_10_variance_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* batch_normalization_11_gamma; 
-  size_t batch_normalization_11_gamma_bytes; 
-  void* batch_normalization_11_beta; 
-  size_t batch_normalization_11_beta_bytes; 
-  void* batch_normalization_11_mean; 
-  size_t batch_normalization_11_mean_bytes; 
-  void* batch_normalization_11_variance; 
-  size_t batch_normalization_11_variance_bytes; 
-  void* depthwise_conv2d_6_w; 
-  size_t depthwise_conv2d_6_w_bytes; 
-  void* batch_normalization_12_gamma; 
-  size_t batch_normalization_12_gamma_bytes; 
-  void* batch_normalization_12_beta; 
-  size_t batch_normalization_12_beta_bytes; 
-  void* batch_normalization_12_mean; 
-  size_t batch_normalization_12_mean_bytes; 
-  void* batch_normalization_12_variance; 
-  size_t batch_normalization_12_variance_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* batch_normalization_13_gamma; 
-  size_t batch_normalization_13_gamma_bytes; 
-  void* batch_normalization_13_beta; 
-  size_t batch_normalization_13_beta_bytes; 
-  void* batch_normalization_13_mean; 
-  size_t batch_normalization_13_mean_bytes; 
-  void* batch_normalization_13_variance; 
-  size_t batch_normalization_13_variance_bytes; 
-  void* depthwise_conv2d_7_w; 
-  size_t depthwise_conv2d_7_w_bytes; 
-  void* batch_normalization_14_gamma; 
-  size_t batch_normalization_14_gamma_bytes; 
-  void* batch_normalization_14_beta; 
-  size_t batch_normalization_14_beta_bytes; 
-  void* batch_normalization_14_mean; 
-  size_t batch_normalization_14_mean_bytes; 
-  void* batch_normalization_14_variance; 
-  size_t batch_normalization_14_variance_bytes; 
-  void* conv2d_8_w; 
-  size_t conv2d_8_w_bytes; 
-  void* batch_normalization_15_gamma; 
-  size_t batch_normalization_15_gamma_bytes; 
-  void* batch_normalization_15_beta; 
-  size_t batch_normalization_15_beta_bytes; 
-  void* batch_normalization_15_mean; 
-  size_t batch_normalization_15_mean_bytes; 
-  void* batch_normalization_15_variance; 
-  size_t batch_normalization_15_variance_bytes; 
-  void* depthwise_conv2d_8_w; 
-  size_t depthwise_conv2d_8_w_bytes; 
-  void* batch_normalization_16_gamma; 
-  size_t batch_normalization_16_gamma_bytes; 
-  void* batch_normalization_16_beta; 
-  size_t batch_normalization_16_beta_bytes; 
-  void* batch_normalization_16_mean; 
-  size_t batch_normalization_16_mean_bytes; 
-  void* batch_normalization_16_variance; 
-  size_t batch_normalization_16_variance_bytes; 
-  void* conv2d_9_w; 
-  size_t conv2d_9_w_bytes; 
-  void* batch_normalization_17_gamma; 
-  size_t batch_normalization_17_gamma_bytes; 
-  void* batch_normalization_17_beta; 
-  size_t batch_normalization_17_beta_bytes; 
-  void* batch_normalization_17_mean; 
-  size_t batch_normalization_17_mean_bytes; 
-  void* batch_normalization_17_variance; 
-  size_t batch_normalization_17_variance_bytes; 
-  void* depthwise_conv2d_9_w; 
-  size_t depthwise_conv2d_9_w_bytes; 
-  void* batch_normalization_18_gamma; 
-  size_t batch_normalization_18_gamma_bytes; 
-  void* batch_normalization_18_beta; 
-  size_t batch_normalization_18_beta_bytes; 
-  void* batch_normalization_18_mean; 
-  size_t batch_normalization_18_mean_bytes; 
-  void* batch_normalization_18_variance; 
-  size_t batch_normalization_18_variance_bytes; 
-  void* conv2d_10_w; 
-  size_t conv2d_10_w_bytes; 
-  void* batch_normalization_19_gamma; 
-  size_t batch_normalization_19_gamma_bytes; 
-  void* batch_normalization_19_beta; 
-  size_t batch_normalization_19_beta_bytes; 
-  void* batch_normalization_19_mean; 
-  size_t batch_normalization_19_mean_bytes; 
-  void* batch_normalization_19_variance; 
-  size_t batch_normalization_19_variance_bytes; 
-  void* depthwise_conv2d_10_w; 
-  size_t depthwise_conv2d_10_w_bytes; 
-  void* batch_normalization_20_gamma; 
-  size_t batch_normalization_20_gamma_bytes; 
-  void* batch_normalization_20_beta; 
-  size_t batch_normalization_20_beta_bytes; 
-  void* batch_normalization_20_mean; 
-  size_t batch_normalization_20_mean_bytes; 
-  void* batch_normalization_20_variance; 
-  size_t batch_normalization_20_variance_bytes; 
-  void* conv2d_11_w; 
-  size_t conv2d_11_w_bytes; 
-  void* batch_normalization_21_gamma; 
-  size_t batch_normalization_21_gamma_bytes; 
-  void* batch_normalization_21_beta; 
-  size_t batch_normalization_21_beta_bytes; 
-  void* batch_normalization_21_mean; 
-  size_t batch_normalization_21_mean_bytes; 
-  void* batch_normalization_21_variance; 
-  size_t batch_normalization_21_variance_bytes; 
-  void* depthwise_conv2d_11_w; 
-  size_t depthwise_conv2d_11_w_bytes; 
-  void* batch_normalization_22_gamma; 
-  size_t batch_normalization_22_gamma_bytes; 
-  void* batch_normalization_22_beta; 
-  size_t batch_normalization_22_beta_bytes; 
-  void* batch_normalization_22_mean; 
-  size_t batch_normalization_22_mean_bytes; 
-  void* batch_normalization_22_variance; 
-  size_t batch_normalization_22_variance_bytes; 
-  void* conv2d_12_w; 
-  size_t conv2d_12_w_bytes; 
-  void* batch_normalization_23_gamma; 
-  size_t batch_normalization_23_gamma_bytes; 
-  void* batch_normalization_23_beta; 
-  size_t batch_normalization_23_beta_bytes; 
-  void* batch_normalization_23_mean; 
-  size_t batch_normalization_23_mean_bytes; 
-  void* batch_normalization_23_variance; 
-  size_t batch_normalization_23_variance_bytes; 
-  void* depthwise_conv2d_12_w; 
-  size_t depthwise_conv2d_12_w_bytes; 
-  void* batch_normalization_24_gamma; 
-  size_t batch_normalization_24_gamma_bytes; 
-  void* batch_normalization_24_beta; 
-  size_t batch_normalization_24_beta_bytes; 
-  void* batch_normalization_24_mean; 
-  size_t batch_normalization_24_mean_bytes; 
-  void* batch_normalization_24_variance; 
-  size_t batch_normalization_24_variance_bytes; 
-  void* conv2d_13_w; 
-  size_t conv2d_13_w_bytes; 
-  void* batch_normalization_25_gamma; 
-  size_t batch_normalization_25_gamma_bytes; 
-  void* batch_normalization_25_beta; 
-  size_t batch_normalization_25_beta_bytes; 
-  void* batch_normalization_25_mean; 
-  size_t batch_normalization_25_mean_bytes; 
-  void* batch_normalization_25_variance; 
-  size_t batch_normalization_25_variance_bytes; 
-  void* depthwise_conv2d_13_w; 
-  size_t depthwise_conv2d_13_w_bytes; 
-  void* batch_normalization_26_gamma; 
-  size_t batch_normalization_26_gamma_bytes; 
-  void* batch_normalization_26_beta; 
-  size_t batch_normalization_26_beta_bytes; 
-  void* batch_normalization_26_mean; 
-  size_t batch_normalization_26_mean_bytes; 
-  void* batch_normalization_26_variance; 
-  size_t batch_normalization_26_variance_bytes; 
-  void* conv2d_14_w; 
-  size_t conv2d_14_w_bytes; 
-  void* batch_normalization_27_gamma; 
-  size_t batch_normalization_27_gamma_bytes; 
-  void* batch_normalization_27_beta; 
-  size_t batch_normalization_27_beta_bytes; 
-  void* batch_normalization_27_mean; 
-  size_t batch_normalization_27_mean_bytes; 
-  void* batch_normalization_27_variance; 
-  size_t batch_normalization_27_variance_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-} RootIn;
-
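-// Host driver: loads the quantized MobileNet weights and the 10000-image
-// 3x32x32 input set from data/mobilenet_quant/, then fills the packed RootIn
-// arguments for the dataflow graph.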
-int main() {
-
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->batch_normalization_1_gamma = batch_normalization_1_gamma; 
-args->batch_normalization_1_gamma_bytes = 0; 
-args->batch_normalization_1_beta = batch_normalization_1_beta; 
-args->batch_normalization_1_beta_bytes = 0; 
-args->batch_normalization_1_mean = batch_normalization_1_mean; 
-args->batch_normalization_1_mean_bytes = 0; 
-args->batch_normalization_1_variance = batch_normalization_1_variance; 
-args->batch_normalization_1_variance_bytes = 0; 
-args->depthwise_conv2d_1_w = depthwise_conv2d_1_w; 
-args->depthwise_conv2d_1_w_bytes = 0; 
-args->batch_normalization_2_gamma = batch_normalization_2_gamma; 
-args->batch_normalization_2_gamma_bytes = 0; 
-args->batch_normalization_2_beta = batch_normalization_2_beta; 
-args->batch_normalization_2_beta_bytes = 0; 
-args->batch_normalization_2_mean = batch_normalization_2_mean; 
-args->batch_normalization_2_mean_bytes = 0; 
-args->batch_normalization_2_variance = batch_normalization_2_variance; 
-args->batch_normalization_2_variance_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->batch_normalization_3_gamma = batch_normalization_3_gamma; 
-args->batch_normalization_3_gamma_bytes = 0; 
-args->batch_normalization_3_beta = batch_normalization_3_beta; 
-args->batch_normalization_3_beta_bytes = 0; 
-args->batch_normalization_3_mean = batch_normalization_3_mean; 
-args->batch_normalization_3_mean_bytes = 0; 
-args->batch_normalization_3_variance = batch_normalization_3_variance; 
-args->batch_normalization_3_variance_bytes = 0; 
-args->depthwise_conv2d_2_w = depthwise_conv2d_2_w; 
-args->depthwise_conv2d_2_w_bytes = 0; 
-args->batch_normalization_4_gamma = batch_normalization_4_gamma; 
-args->batch_normalization_4_gamma_bytes = 0; 
-args->batch_normalization_4_beta = batch_normalization_4_beta; 
-args->batch_normalization_4_beta_bytes = 0; 
-args->batch_normalization_4_mean = batch_normalization_4_mean; 
-args->batch_normalization_4_mean_bytes = 0; 
-args->batch_normalization_4_variance = batch_normalization_4_variance; 
-args->batch_normalization_4_variance_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->batch_normalization_5_gamma = batch_normalization_5_gamma; 
-args->batch_normalization_5_gamma_bytes = 0; 
-args->batch_normalization_5_beta = batch_normalization_5_beta; 
-args->batch_normalization_5_beta_bytes = 0; 
-args->batch_normalization_5_mean = batch_normalization_5_mean; 
-args->batch_normalization_5_mean_bytes = 0; 
-args->batch_normalization_5_variance = batch_normalization_5_variance; 
-args->batch_normalization_5_variance_bytes = 0; 
-args->depthwise_conv2d_3_w = depthwise_conv2d_3_w; 
-args->depthwise_conv2d_3_w_bytes = 0; 
-args->batch_normalization_6_gamma = batch_normalization_6_gamma; 
-args->batch_normalization_6_gamma_bytes = 0; 
-args->batch_normalization_6_beta = batch_normalization_6_beta; 
-args->batch_normalization_6_beta_bytes = 0; 
-args->batch_normalization_6_mean = batch_normalization_6_mean; 
-args->batch_normalization_6_mean_bytes = 0; 
-args->batch_normalization_6_variance = batch_normalization_6_variance; 
-args->batch_normalization_6_variance_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->batch_normalization_7_gamma = batch_normalization_7_gamma; 
-args->batch_normalization_7_gamma_bytes = 0; 
-args->batch_normalization_7_beta = batch_normalization_7_beta; 
-args->batch_normalization_7_beta_bytes = 0; 
-args->batch_normalization_7_mean = batch_normalization_7_mean; 
-args->batch_normalization_7_mean_bytes = 0; 
-args->batch_normalization_7_variance = batch_normalization_7_variance; 
-args->batch_normalization_7_variance_bytes = 0; 
-args->depthwise_conv2d_4_w = depthwise_conv2d_4_w; 
-args->depthwise_conv2d_4_w_bytes = 0; 
-args->batch_normalization_8_gamma = batch_normalization_8_gamma; 
-args->batch_normalization_8_gamma_bytes = 0; 
-args->batch_normalization_8_beta = batch_normalization_8_beta; 
-args->batch_normalization_8_beta_bytes = 0; 
-args->batch_normalization_8_mean = batch_normalization_8_mean; 
-args->batch_normalization_8_mean_bytes = 0; 
-args->batch_normalization_8_variance = batch_normalization_8_variance; 
-args->batch_normalization_8_variance_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->batch_normalization_9_gamma = batch_normalization_9_gamma; 
-args->batch_normalization_9_gamma_bytes = 0; 
-args->batch_normalization_9_beta = batch_normalization_9_beta; 
-args->batch_normalization_9_beta_bytes = 0; 
-args->batch_normalization_9_mean = batch_normalization_9_mean; 
-args->batch_normalization_9_mean_bytes = 0; 
-args->batch_normalization_9_variance = batch_normalization_9_variance; 
-args->batch_normalization_9_variance_bytes = 0; 
-args->depthwise_conv2d_5_w = depthwise_conv2d_5_w; 
-args->depthwise_conv2d_5_w_bytes = 0; 
-args->batch_normalization_10_gamma = batch_normalization_10_gamma; 
-args->batch_normalization_10_gamma_bytes = 0; 
-args->batch_normalization_10_beta = batch_normalization_10_beta; 
-args->batch_normalization_10_beta_bytes = 0; 
-args->batch_normalization_10_mean = batch_normalization_10_mean; 
-args->batch_normalization_10_mean_bytes = 0; 
-args->batch_normalization_10_variance = batch_normalization_10_variance; 
-args->batch_normalization_10_variance_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->batch_normalization_11_gamma = batch_normalization_11_gamma; 
-args->batch_normalization_11_gamma_bytes = 0; 
-args->batch_normalization_11_beta = batch_normalization_11_beta; 
-args->batch_normalization_11_beta_bytes = 0; 
-args->batch_normalization_11_mean = batch_normalization_11_mean; 
-args->batch_normalization_11_mean_bytes = 0; 
-args->batch_normalization_11_variance = batch_normalization_11_variance; 
-args->batch_normalization_11_variance_bytes = 0; 
-args->depthwise_conv2d_6_w = depthwise_conv2d_6_w; 
-args->depthwise_conv2d_6_w_bytes = 0; 
-args->batch_normalization_12_gamma = batch_normalization_12_gamma; 
-args->batch_normalization_12_gamma_bytes = 0; 
-args->batch_normalization_12_beta = batch_normalization_12_beta; 
-args->batch_normalization_12_beta_bytes = 0; 
-args->batch_normalization_12_mean = batch_normalization_12_mean; 
-args->batch_normalization_12_mean_bytes = 0; 
-args->batch_normalization_12_variance = batch_normalization_12_variance; 
-args->batch_normalization_12_variance_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->batch_normalization_13_gamma = batch_normalization_13_gamma; 
-args->batch_normalization_13_gamma_bytes = 0; 
-args->batch_normalization_13_beta = batch_normalization_13_beta; 
-args->batch_normalization_13_beta_bytes = 0; 
-args->batch_normalization_13_mean = batch_normalization_13_mean; 
-args->batch_normalization_13_mean_bytes = 0; 
-args->batch_normalization_13_variance = batch_normalization_13_variance; 
-args->batch_normalization_13_variance_bytes = 0; 
-args->depthwise_conv2d_7_w = depthwise_conv2d_7_w; 
-args->depthwise_conv2d_7_w_bytes = 0; 
-args->batch_normalization_14_gamma = batch_normalization_14_gamma; 
-args->batch_normalization_14_gamma_bytes = 0; 
-args->batch_normalization_14_beta = batch_normalization_14_beta; 
-args->batch_normalization_14_beta_bytes = 0; 
-args->batch_normalization_14_mean = batch_normalization_14_mean; 
-args->batch_normalization_14_mean_bytes = 0; 
-args->batch_normalization_14_variance = batch_normalization_14_variance; 
-args->batch_normalization_14_variance_bytes = 0; 
-args->conv2d_8_w = conv2d_8_w; 
-args->conv2d_8_w_bytes = 0; 
-args->batch_normalization_15_gamma = batch_normalization_15_gamma; 
-args->batch_normalization_15_gamma_bytes = 0; 
-args->batch_normalization_15_beta = batch_normalization_15_beta; 
-args->batch_normalization_15_beta_bytes = 0; 
-args->batch_normalization_15_mean = batch_normalization_15_mean; 
-args->batch_normalization_15_mean_bytes = 0; 
-args->batch_normalization_15_variance = batch_normalization_15_variance; 
-args->batch_normalization_15_variance_bytes = 0; 
-args->depthwise_conv2d_8_w = depthwise_conv2d_8_w; 
-args->depthwise_conv2d_8_w_bytes = 0; 
-args->batch_normalization_16_gamma = batch_normalization_16_gamma; 
-args->batch_normalization_16_gamma_bytes = 0; 
-args->batch_normalization_16_beta = batch_normalization_16_beta; 
-args->batch_normalization_16_beta_bytes = 0; 
-args->batch_normalization_16_mean = batch_normalization_16_mean; 
-args->batch_normalization_16_mean_bytes = 0; 
-args->batch_normalization_16_variance = batch_normalization_16_variance; 
-args->batch_normalization_16_variance_bytes = 0; 
-args->conv2d_9_w = conv2d_9_w; 
-args->conv2d_9_w_bytes = 0; 
-args->batch_normalization_17_gamma = batch_normalization_17_gamma; 
-args->batch_normalization_17_gamma_bytes = 0; 
-args->batch_normalization_17_beta = batch_normalization_17_beta; 
-args->batch_normalization_17_beta_bytes = 0; 
-args->batch_normalization_17_mean = batch_normalization_17_mean; 
-args->batch_normalization_17_mean_bytes = 0; 
-args->batch_normalization_17_variance = batch_normalization_17_variance; 
-args->batch_normalization_17_variance_bytes = 0; 
-args->depthwise_conv2d_9_w = depthwise_conv2d_9_w; 
-args->depthwise_conv2d_9_w_bytes = 0; 
-args->batch_normalization_18_gamma = batch_normalization_18_gamma; 
-args->batch_normalization_18_gamma_bytes = 0; 
-args->batch_normalization_18_beta = batch_normalization_18_beta; 
-args->batch_normalization_18_beta_bytes = 0; 
-args->batch_normalization_18_mean = batch_normalization_18_mean; 
-args->batch_normalization_18_mean_bytes = 0; 
-args->batch_normalization_18_variance = batch_normalization_18_variance; 
-args->batch_normalization_18_variance_bytes = 0; 
-args->conv2d_10_w = conv2d_10_w; 
-args->conv2d_10_w_bytes = 0; 
-args->batch_normalization_19_gamma = batch_normalization_19_gamma; 
-args->batch_normalization_19_gamma_bytes = 0; 
-args->batch_normalization_19_beta = batch_normalization_19_beta; 
-args->batch_normalization_19_beta_bytes = 0; 
-args->batch_normalization_19_mean = batch_normalization_19_mean; 
-args->batch_normalization_19_mean_bytes = 0; 
-args->batch_normalization_19_variance = batch_normalization_19_variance; 
-args->batch_normalization_19_variance_bytes = 0; 
-args->depthwise_conv2d_10_w = depthwise_conv2d_10_w; 
-args->depthwise_conv2d_10_w_bytes = 0; 
-args->batch_normalization_20_gamma = batch_normalization_20_gamma; 
-args->batch_normalization_20_gamma_bytes = 0; 
-args->batch_normalization_20_beta = batch_normalization_20_beta; 
-args->batch_normalization_20_beta_bytes = 0; 
-args->batch_normalization_20_mean = batch_normalization_20_mean; 
-args->batch_normalization_20_mean_bytes = 0; 
-args->batch_normalization_20_variance = batch_normalization_20_variance; 
-args->batch_normalization_20_variance_bytes = 0; 
-args->conv2d_11_w = conv2d_11_w; 
-args->conv2d_11_w_bytes = 0; 
-args->batch_normalization_21_gamma = batch_normalization_21_gamma; 
-args->batch_normalization_21_gamma_bytes = 0; 
-args->batch_normalization_21_beta = batch_normalization_21_beta; 
-args->batch_normalization_21_beta_bytes = 0; 
-args->batch_normalization_21_mean = batch_normalization_21_mean; 
-args->batch_normalization_21_mean_bytes = 0; 
-args->batch_normalization_21_variance = batch_normalization_21_variance; 
-args->batch_normalization_21_variance_bytes = 0; 
-args->depthwise_conv2d_11_w = depthwise_conv2d_11_w; 
-args->depthwise_conv2d_11_w_bytes = 0; 
-args->batch_normalization_22_gamma = batch_normalization_22_gamma; 
-args->batch_normalization_22_gamma_bytes = 0; 
-args->batch_normalization_22_beta = batch_normalization_22_beta; 
-args->batch_normalization_22_beta_bytes = 0; 
-args->batch_normalization_22_mean = batch_normalization_22_mean; 
-args->batch_normalization_22_mean_bytes = 0; 
-args->batch_normalization_22_variance = batch_normalization_22_variance; 
-args->batch_normalization_22_variance_bytes = 0; 
-args->conv2d_12_w = conv2d_12_w; 
-args->conv2d_12_w_bytes = 0; 
-args->batch_normalization_23_gamma = batch_normalization_23_gamma; 
-args->batch_normalization_23_gamma_bytes = 0; 
-args->batch_normalization_23_beta = batch_normalization_23_beta; 
-args->batch_normalization_23_beta_bytes = 0; 
-args->batch_normalization_23_mean = batch_normalization_23_mean; 
-args->batch_normalization_23_mean_bytes = 0; 
-args->batch_normalization_23_variance = batch_normalization_23_variance; 
-args->batch_normalization_23_variance_bytes = 0; 
-args->depthwise_conv2d_12_w = depthwise_conv2d_12_w; 
-args->depthwise_conv2d_12_w_bytes = 0; 
-args->batch_normalization_24_gamma = batch_normalization_24_gamma; 
-args->batch_normalization_24_gamma_bytes = 0; 
-args->batch_normalization_24_beta = batch_normalization_24_beta; 
-args->batch_normalization_24_beta_bytes = 0; 
-args->batch_normalization_24_mean = batch_normalization_24_mean; 
-args->batch_normalization_24_mean_bytes = 0; 
-args->batch_normalization_24_variance = batch_normalization_24_variance; 
-args->batch_normalization_24_variance_bytes = 0; 
-args->conv2d_13_w = conv2d_13_w; 
-args->conv2d_13_w_bytes = 0; 
-args->batch_normalization_25_gamma = batch_normalization_25_gamma; 
-args->batch_normalization_25_gamma_bytes = 0; 
-args->batch_normalization_25_beta = batch_normalization_25_beta; 
-args->batch_normalization_25_beta_bytes = 0; 
-args->batch_normalization_25_mean = batch_normalization_25_mean; 
-args->batch_normalization_25_mean_bytes = 0; 
-args->batch_normalization_25_variance = batch_normalization_25_variance; 
-args->batch_normalization_25_variance_bytes = 0; 
-args->depthwise_conv2d_13_w = depthwise_conv2d_13_w; 
-args->depthwise_conv2d_13_w_bytes = 0; 
-args->batch_normalization_26_gamma = batch_normalization_26_gamma; 
-args->batch_normalization_26_gamma_bytes = 0; 
-args->batch_normalization_26_beta = batch_normalization_26_beta; 
-args->batch_normalization_26_beta_bytes = 0; 
-args->batch_normalization_26_mean = batch_normalization_26_mean; 
-args->batch_normalization_26_mean_bytes = 0; 
-args->batch_normalization_26_variance = batch_normalization_26_variance; 
-args->batch_normalization_26_variance_bytes = 0; 
-args->conv2d_14_w = conv2d_14_w; 
-args->conv2d_14_w_bytes = 0; 
-args->batch_normalization_27_gamma = batch_normalization_27_gamma; 
-args->batch_normalization_27_gamma_bytes = 0; 
-args->batch_normalization_27_beta = batch_normalization_27_beta; 
-args->batch_normalization_27_beta_bytes = 0; 
-args->batch_normalization_27_mean = batch_normalization_27_mean; 
-args->batch_normalization_27_mean_bytes = 0; 
-args->batch_normalization_27_variance = batch_normalization_27_variance; 
-args->batch_normalization_27_variance_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
-computeAccuracy2(labels, 10000, result); 
-return 0; 
-
-} 
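Editor's note: the deleted source above follows one mechanical, generated pattern. The condensed C++ sketch below (not part of the patch) restates it for readability: read every trained tensor from a raw .bin file, bind each pointer into the RootIn struct alongside a *_bytes field left for the runtime, launch the dataflow graph, wait, pull the result back to the host, and score it. All identifiers come from the deleted file itself; only the condensation, and the float32/NCHW reading of readTrainedWeights, are editorial assumptions.

    // Condensed sketch of the deleted MobileNet harness (illustrative only).
    std::string conv2d_13_w_path = dir_prefix + std::string("conv2d_13_w.bin");
    // readTrainedWeights(path, 0, N, C, H, W): maps a headerless weight file onto
    // an N x C x H x W tensor (float32/NCHW is an assumption; the second argument
    // is 0 throughout the deleted file and its meaning is not shown here).
    void *conv2d_13_w = readTrainedWeights(conv2d_13_w_path.c_str(), 0, 1024, 512, 1, 1);
    // ...one such call per conv / depthwise-conv / batch-norm / dense tensor...

    __visc__init();
    RootIn *args = static_cast<RootIn *>(malloc(sizeof(RootIn)));
    args->conv2d_13_w = conv2d_13_w;  // pointer slot
    args->conv2d_13_w_bytes = 0;      // size slot, left for the runtime to fill
    // ...the same two-line pattern repeats for every remaining tensor...

    void *dfg = __visc__launch(0, root, (void *)args);  // start the dataflow graph
    __visc__wait(dfg);                                  // block until it finishes

    void *result = static_cast<RootIn *>(args)->input;  // output is bound to the input slot
    hpvm_request_tensor(result, 0);                     // migrate the result tensor to the host
    __visc__cleanup();
    computeAccuracy2(labels, 10000, result);            // score against the 10000 test labels
    free(args);                                         // not freed in the original source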
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_beta.bin
deleted file mode 100644
index bb1eb07a8e262d2f4d941578fd4c19d6a90c7562..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_gamma.bin
deleted file mode 100644
index 931c8925b89f363a41d3cf81483bde60abafba61..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_mean.bin
deleted file mode 100644
index 633bdc9fd4a9ef052ca8b6ab488a156002e3d4b5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_variance.bin
deleted file mode 100644
index f92c73f59eb5eb35ca94e3ce006e5f3c4f60ecef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_10_variance.bin and /dev/null differ
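Editor's note: git rendered several of these binary deletions as unreadable text hunks; they have been normalized above to the `Binary files ... differ` form used by the other entries. Judging by the readTrainedWeights(..., 0, N, C, H, W) calls in the deleted harness, each .bin appears to be a headerless array of N*C*H*W little-endian float32 values. A minimal loader under that assumption is sketched below; loadRawFloatBin is hypothetical and not part of the codebase, so verify against the real readTrainedWeights before relying on it.

    #include <fstream>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Sketch: read one parameter file as a headerless float32 array (assumed format).
    std::vector<float> loadRawFloatBin(const std::string &path, size_t count) {
        std::ifstream in(path, std::ios::binary);
        if (!in)
            throw std::runtime_error("cannot open " + path);
        std::vector<float> data(count);
        in.read(reinterpret_cast<char *>(data.data()),
                static_cast<std::streamsize>(count * sizeof(float)));
        if (static_cast<size_t>(in.gcount()) != count * sizeof(float))
            throw std::runtime_error("short read on " + path);
        return data;
    }

    // e.g. a 1024-channel batch-norm parameter file holds 1*1024*1*1 floats:
    // auto variance = loadRawFloatBin("batch_normalization_25_variance.bin", 1024);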
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_beta.bin
deleted file mode 100644
index 5918477d3638e851c3fdfc47dc550cea3afa7d50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_gamma.bin
deleted file mode 100644
index 6b3d705199383135bed811a6fdaa237d754487bd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_mean.bin
deleted file mode 100644
index 965edb6440d48ce4b9abc68cd3b9eb1d3f9cf3da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_variance.bin
deleted file mode 100644
index a7a4b16bd7e581a4fdf1819ec0484559febd1fca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_beta.bin
deleted file mode 100644
index 8ade4cf080d7d3228e752d284ed500ba6300d261..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_gamma.bin
deleted file mode 100644
index 6dfb7c3833821b29f9230df806c4abc0c16a7b59..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_mean.bin
deleted file mode 100644
index 8899c2ad8395a98c752b1777095018cc90ca693b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_variance.bin
deleted file mode 100644
index 9206092b5ee7fa6178bb9109a9aabd5dbfaa7ccf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_beta.bin
deleted file mode 100644
index 0f5fe8656435b28ec4b928af599b0a63915a651a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_gamma.bin
deleted file mode 100644
index c79d7d0b02b65ea9953bfd1fa164773f96e5ade0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_mean.bin
deleted file mode 100644
index 2a6d471779cb2634718545d33827ca1d8d023c07..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_variance.bin
deleted file mode 100644
index 5a2e2c8ca3645c6115b341b71141029d25064f18..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_beta.bin
deleted file mode 100644
index 79948d2a5e40f633e6675c9c8c98f186a3ae2626..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_gamma.bin
deleted file mode 100644
index 2f9a59ae913b2fcf4ef44018e295a055ea357d45..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_mean.bin
deleted file mode 100644
index 278d39b1a67c00a4015d2687ab936ddd4cbc6e34..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_variance.bin
deleted file mode 100644
index 4749c1a52d14caccf7df518ad56f2c03901dcf1a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_14_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_beta.bin
deleted file mode 100644
index 27f1a01dee6e2c9631ef312015fca880f8aa7b99..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_gamma.bin
deleted file mode 100644
index 0fe3148783c75679668beae35231fa2eb0308a8a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_mean.bin
deleted file mode 100644
index 9701d55c3d49a2d4ee43a45dad07886d62591653..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_variance.bin
deleted file mode 100644
index f679da9df83af326cc3d886528c298157ffbb561..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_15_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_beta.bin
deleted file mode 100644
index c2802a0da57a45a0839b9896a3dd0a9a70b8e669..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_gamma.bin
deleted file mode 100644
index f94cebe4a7af3a4c840c2f8b9bbb9a1ee7cb5b29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_mean.bin
deleted file mode 100644
index a6d415f6dfd476fe1fd620794230c6d289158f50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_variance.bin
deleted file mode 100644
index efa5fcfd7916e86848227806134efd7b4ec1e55e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_16_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_beta.bin
deleted file mode 100644
index 41201773cfd82292ab63ade568191ed261648538..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_gamma.bin
deleted file mode 100644
index 87613f6bc687bd539da0dd3fbda58e19a3e4071c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_mean.bin
deleted file mode 100644
index dee72d911fc96d785150d99101faac2905c61bb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_variance.bin
deleted file mode 100644
index 86732c56ca1d6fa38ed0ccd379a26a7756816f7b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_17_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_beta.bin
deleted file mode 100644
index c520fdc378129c16c3c7ab8772faea68e00fd4f7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_gamma.bin
deleted file mode 100644
index 1aec3276306988ccd80ab907faba7538170d6e0e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_mean.bin
deleted file mode 100644
index cf9f6a04871515eae7a1aee7c9d103ca13bc8aae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_variance.bin
deleted file mode 100644
index 7b46f134cd68995d45a2baab62188fd775e4ae82..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_18_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_beta.bin
deleted file mode 100644
index a4a7d99bc7b4c8f1a0d5dbdc4385036d01586d33..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_gamma.bin
deleted file mode 100644
index 60ea687e491464d474868e42dfc21ce1cd67961d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_mean.bin
deleted file mode 100644
index 2d9c9ef86608e1af225cd46ddd07d3a2bb9d5853..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_variance.bin
deleted file mode 100644
index f4e2ef2b5ae595944b6d2a4191594a2029508b1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_19_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_beta.bin
deleted file mode 100644
index d6a711c22f8e5e9b9df5fe17fec24e12d35c20cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_gamma.bin
deleted file mode 100644
index 9565d3b2a5ed07f2017c79534d689a729160ca46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_mean.bin
deleted file mode 100644
index f552c5162cd4d3d2ed8c0edf098c2a9adbb403fd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_variance.bin
deleted file mode 100644
index 715fe55fd43af30b967ade11301595dd051a7770..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_1_variance.bin and /dev/null differ
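Editor's note: each batch_normalization_k layer contributes four per-channel parameter files (gamma, beta, mean, variance), deleted together here. At inference these combine in the standard way, y = gamma * (x - mean) / sqrt(variance + eps) + beta. The sketch below is purely illustrative: applyBatchNorm is hypothetical, eps = 1e-3 is an assumed (Keras-style) default, and this is not the tensor runtime's actual kernel.

    #include <cmath>
    #include <cstddef>

    // Sketch: fold one layer's four per-channel .bin parameters into an
    // inference-time scale-and-shift over an NCHW buffer (hw = H * W).
    void applyBatchNorm(float *data, size_t n, size_t c, size_t hw,
                        const float *gamma, const float *beta,
                        const float *mean, const float *variance,
                        float eps = 1e-3f) {
        for (size_t i = 0; i < n; ++i)
            for (size_t ch = 0; ch < c; ++ch) {
                const float scale = gamma[ch] / std::sqrt(variance[ch] + eps);
                const float shift = beta[ch] - scale * mean[ch];
                float *p = data + (i * c + ch) * hw;  // start of this channel's plane
                for (size_t j = 0; j < hw; ++j)
                    p[j] = scale * p[j] + shift;
            }
    }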
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_beta.bin
deleted file mode 100644
index 5291d00818ecc56eb039c71ed86d1a8e7e0f03a5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_gamma.bin
deleted file mode 100644
index 0ac1e2c1fa63ce2deb08f1b7a5aacd925749385b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_mean.bin
deleted file mode 100644
index f183a0ee683d40cc26247a32963e6321f85e7688..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_variance.bin
deleted file mode 100644
index 1d9fac8cdd2e32c1e821deaef3ad2a6bcd4cbdb9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_20_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_beta.bin
deleted file mode 100644
index 393f76218be9548b415c5b1a43a3c63a302b7300..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_gamma.bin
deleted file mode 100644
index 8b84922da7063fb41b68d983475c4c9bf91a2ac1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_mean.bin
deleted file mode 100644
index 78f070dc6515294f189e0b71692e4f61981608fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_variance.bin
deleted file mode 100644
index e2e11c338fb2ea2a00d3aae3798ca3a2fdb82a1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_21_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_beta.bin
deleted file mode 100644
index bf38673377e42584d82b848299c7bfb531655de5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_gamma.bin
deleted file mode 100644
index fd397b675a9a5da3fc1174a2f56f84ef3d67a8e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_mean.bin
deleted file mode 100644
index 13549710237f51a5a9c84abf6272275396fff888..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_variance.bin
deleted file mode 100644
index 8102a808657f0b45d3a2a959bb3793c24f0c14ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_22_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_beta.bin
deleted file mode 100644
index c396a8e2939c25d30b2021e6ca343913021309f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_gamma.bin
deleted file mode 100644
index 0ee822b7e19677f3b7f7fcfce5456c2b1082efd7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_mean.bin
deleted file mode 100644
index fbf6f4eac60ed424271646218cb74ddaa5d74104..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_variance.bin
deleted file mode 100644
index d630a7ac1ecc23cfaeb1c88311dd6e5c6c4bbdbc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_23_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_beta.bin
deleted file mode 100644
index 3c70dadf33fe75b4e62ad704c6e4eebfe726792a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_gamma.bin
deleted file mode 100644
index 09cd79dc17aea4d5c5b6c604248a81d929170e45..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_mean.bin
deleted file mode 100644
index cbf013bcb470738d762c2cbda76745bf80ec765b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_variance.bin
deleted file mode 100644
index 0039d0bad928dee087c70a587d0e5a843790e077..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_24_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_beta.bin
deleted file mode 100644
index 0c9f7ae71b66a85ed843a45703717064be84a64c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_gamma.bin
deleted file mode 100644
index 8ae7623c12452151e9a4b100cd344f9b46121bab..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_mean.bin
deleted file mode 100644
index 062398cda6d3315629ee845e1bdd7d4623bc7493..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_variance.bin
deleted file mode 100644
index 0b5029b6aba8673c6fd7a9844c0feb4b8d7da490..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_25_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_beta.bin
deleted file mode 100644
index 1edd9d65782ee53219b97efd095a0d31af296d06..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_gamma.bin
deleted file mode 100644
index f9885c71b64218be5ce4187a9306e1869c41b5fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_mean.bin
deleted file mode 100644
index 9d34da9b2aae4e306e7061e380168ac6bc0f7a00..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_variance.bin
deleted file mode 100644
index 2bd6648fa7d61af054f9d36916cc1975f3f351ae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_26_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_beta.bin
deleted file mode 100644
index e6b513e4055d1394fe9eb9437b00864d570780aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_gamma.bin
deleted file mode 100644
index 0349ab56289301dbc5d95375e0a553afb8cc8cf6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_mean.bin
deleted file mode 100644
index 8ae8e0fc3d161ef33ebd15cbdc620863332e8216..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_variance.bin
deleted file mode 100644
index 602be2e5a92239d688e30a082d79f8bec599c27f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_27_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_beta.bin
deleted file mode 100644
index c9af5d00060958d9ce8073e95c74483ba63bcbec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_gamma.bin
deleted file mode 100644
index 59b78a30bf741b86e7bcd8346981f76749c2a981..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_mean.bin
deleted file mode 100644
index faa537236ff696e81e93fdcffef78e86c66ead9f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_variance.bin
deleted file mode 100644
index 9a9ec730a4aabf7b35e502daca5dfe0dbf113418..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_beta.bin
deleted file mode 100644
index dfbcff725a71852e107a04917d0a65a3544604e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_gamma.bin
deleted file mode 100644
index ded64a0e5a70a9155c377e8a8244b85f623dee46..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_gamma.bin
+++ /dev/null
@@ -1 +0,0 @@
-œÃ„? ·?¹?u? âz?¬~?ò”|?q?lŽƒ?‰ñ?z	‚?O€?ä	€?Hå}?Ì€z?R?óå^?ƒ¬?ª:…?(è€?zp?ñvr?óœ]?Hƒ?Agƒ?¯[‚?é+€?&1/?ìè?ÒÞw?¥}?å]€?ç?JD~?Hƒ?o(~?6ñ'?/~?EÖ~?Õz?ÆBl?ä?.€?¢ø?9g?a°ƒ?›‚?n€??O?€?È‚?…`s?’‚? ß|?äv?ŸtT?L¦‚?
¤ˆ?ù;ï>èÜ€?-±v?êj?Æ#‡?ˆ×~?KSQ?
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_mean.bin
deleted file mode 100644
index 058394e6ac8c95cec8fb6050daf47289e8c81b48..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_mean.bin
+++ /dev/null
@@ -1,2 +0,0 @@
-ø5¿›‚Ï?Žî–¾W„ÀÑŽ">}‚IÀ°ì½ÿÕ@¿©@‹ûÏ?&gü>ŒíÅ?Ã~Á>?34¿mëN?y’1?aŒ> ß¾ÞÀb„,À—qâ?j%„¿
-@ÓÀÜ|ÀÔ{™?ž´·?D @¬]î¾T„>tí¾¼#ˆ?Ôž–?qŽ­¿öÂ:¿0Ž>¡Ji?ܽ¤¾Òá?õwʼöX¿™<¿åÀ7aD?Ê?°~²?ÿŒN¾8„å?ß
¿‡U?°Í§¾ß¬?§EP?€½JO?¡x@ÏcF@¦‹@Ù_@“Ô³¿bèà?–Ô¾ò÷¾0óµ¾
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_variance.bin
deleted file mode 100644
index d5dba0a9275910fdded47a2604453ae46f611c16..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_3_variance.bin
+++ /dev/null
@@ -1 +0,0 @@
-:Â@ª÷í?P&å?{¯Œ@õŽw?ÉI@ÔûŒ?Ç—?Ó¨û?†AÆ>sÎ?Bˆ??þÙ?Ú’{?·è@ÀÖÆ?ç¹%@s½–?F.F@ƒªí?â¹®?A<@Ÿ?QN»?N?˧“?ÁÌ@
…ä>M&?³²?¬“G?³®¯?å§¹?×mt?®–â?œv?Ÿ/Ö?Z0?AÒG@ÄSµ?û<?Î
É?*ëx?œLÒ?é5ƒ?ÃǪ?TÎ@«8(@0]?ß3@QÆ£?kä?»,?%!©?6ï?3or@¤û
@ÌO?•m’?{Ÿ?“¨ô?fêZ?lg¿?
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_beta.bin
deleted file mode 100644
index 70ddacf8f0bd27523892f5af52ded3302c4715d4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_gamma.bin
deleted file mode 100644
index 3f64ef0b25bb6e00a6012f360e65812d22ca672f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_mean.bin
deleted file mode 100644
index 28c78d2db90aadc66f0d1f7d647e32044fd12744..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_variance.bin
deleted file mode 100644
index 8f361cbf915cd5fb93f32847280d50dad8e9b791..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_beta.bin
deleted file mode 100644
index 37161ae89f38c6489ae9ed0d99ad2df5a5f2f093..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_gamma.bin
deleted file mode 100644
index efefd0af2fbdc436d3321906166debd0323c1571..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_mean.bin
deleted file mode 100644
index 7eb215a96c6fb385ec761cf16be0339f3656b717..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_variance.bin
deleted file mode 100644
index d220b9e27ad8cd5a7b4bcba39105c8ee969bc4f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_beta.bin
deleted file mode 100644
index 39a7a8779dc5ba6a394748a88391fbbf8b35ec23..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_gamma.bin
deleted file mode 100644
index 8bfc97196078b732c1ab61e8a3bbb656d29d3728..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_mean.bin
deleted file mode 100644
index f427d142f3bf2147d302426700b2f0ee817ec308..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_variance.bin
deleted file mode 100644
index 4c571acca77f147260874e9ae0ff1722076746ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_beta.bin
deleted file mode 100644
index 4e72081f35c879ebc0d0bc57e3ced79a81200854..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_gamma.bin
deleted file mode 100644
index e8ac9fe5f793a80b78c9a2099d37a96d093097ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_mean.bin
deleted file mode 100644
index 42ec4b5d965a8dc26c8d6218195e1c87739fb9fa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_variance.bin
deleted file mode 100644
index 17911f473710c3e37246c1de1a4121be21585ee2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_beta.bin
deleted file mode 100644
index c3b0b374f59d9c906906d51621a99704e26ed422..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_gamma.bin
deleted file mode 100644
index b271fb02201a3f354162e281cf1bac5998ed28a2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_mean.bin
deleted file mode 100644
index 5888235eb54a3c9ad548a51708eb39c13d7e8ddd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_variance.bin
deleted file mode 100644
index be017b25adccfc236b22789abd11b0ff50fb5a40..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_beta.bin
deleted file mode 100644
index 13e7e2a820d8c80f79e05b91540c0d5493387306..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_gamma.bin
deleted file mode 100644
index 4d65230c8dc292bceb2414527469eca65674af13..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_mean.bin
deleted file mode 100644
index 67b8b25e4fff4232001931073a803f3dfe363187..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_variance.bin
deleted file mode 100644
index 59a0b1e0d59434dfb9d94f4cefdcfab4cdec0b93..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_10_w.bin
deleted file mode 100644
index 2000dbf19acd71e28da72db217f7f34d80be4d55..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_11_w.bin
deleted file mode 100644
index e38c7f59fa6346b7a4c1c2e676cec648277986aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_12_w.bin
deleted file mode 100644
index fd7b6121bdd5b28f0c65caec9e90676d9ccc2171..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_13_w.bin
deleted file mode 100644
index 2a6a844fa8e1ee98017c3d1e3a9024f39c6f1568..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_14_w.bin
deleted file mode 100644
index ff22cedb2ef6ef7aaffbf434d5dae78cf813de27..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_1_w.bin
deleted file mode 100644
index bafe4f5ad48926ac6a00086e2e9ce2cda85bd9ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_2_w.bin
deleted file mode 100644
index eff0fc063670e2a30c86b70b2611787f454db6fb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_3_w.bin
deleted file mode 100644
index e09cda44638fd9f0032b47d6f5fc7ece69cd24b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_4_w.bin
deleted file mode 100644
index ce941bc4965f21e57f6b6cab24639d8bab593b6e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_5_w.bin
deleted file mode 100644
index 12a7e35468d1d003b9f65b4a515f82c4a2f42ca6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_6_w.bin
deleted file mode 100644
index 15c80714155c176c53788c7a4926ae90d6a50a54..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_7_w.bin
deleted file mode 100644
index aabaa5eb3ce76dba62573d51d7b63d037df1ce82..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_8_w.bin
deleted file mode 100644
index ad954d098872fcf34792606a50d7e46c6a0008c6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_9_w.bin
deleted file mode 100644
index 50ea54350fc605740424c8b6e5a48cbe7846181b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_b.bin
deleted file mode 100644
index 1e697e20d8008cba5750a47aa9a53d8b29b1b0e2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_w.bin
deleted file mode 100644
index 9105f0e8d7739016cce69125dee5e8102d67c8d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_10_w.bin
deleted file mode 100644
index f7cbc07e8ef10d1c910e8cb8e0880a263f944d4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_11_w.bin
deleted file mode 100644
index c9fb2daae05c1272ee93cf8dfd817e08591834e1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_12_w.bin
deleted file mode 100644
index 58c263417c0669304fff4416cd7c45dc001d4f81..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_13_w.bin
deleted file mode 100644
index 36d45717f5a1435df7c2cecca1353ca326ea98f9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_1_w.bin
deleted file mode 100644
index 0224a1a1465811bf5768565cc637a9757e8db9c2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_2_w.bin
deleted file mode 100644
index 33c3af23f2fee0a9bd871d3e95c26d17b7108c29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_3_w.bin
deleted file mode 100644
index 1bcfbd7df4591bde2936e7ccfa9b1f10cf9f0d1e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_4_w.bin
deleted file mode 100644
index 49a61f541371dd83a76c5efa90cd9ec3eaa13de0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_5_w.bin
deleted file mode 100644
index d488d6077e6a7e13a9bf8fbd9eb67fa735d6befe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_6_w.bin
deleted file mode 100644
index 7ab35e18d4824343230e241e3c6ecfcc20b57b83..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_7_w.bin
deleted file mode 100644
index 569a5573a4f9a5a3f7fb87361b30f361abcff2cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_8_w.bin
deleted file mode 100644
index 10dc6502f6d0c128cdeae1fd07359be2bc500981..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_9_w.bin
deleted file mode 100644
index 9112cb3cc2eb816e5e3592b00cd331c23b185b1d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/depthwise_conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/input.bin
deleted file mode 100644
index 86390e39e0f8515d52ca6d5ab99b98af7d72b93c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels.bin
deleted file mode 100644
index 72e2c6650e2d717f25484f9f67068be084e7f175..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels32.bin
deleted file mode 100644
index 870f85ff4802d369b0db3bf334ba566338f683a1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layer_composition.txt
deleted file mode 100644
index 10692997a90e4490a91ad3d0e6e04285754144fd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layer_composition.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layers.txt
deleted file mode 100644
index 0bd2b554374c10d748a652f52e5427c716be0084..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/layers.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorBatchNorm1
-#tensorRelu1
-#tensorDepthwiseConv1
-#tensorBatchNorm2
-#tensorRelu2
-Conv2,10000,32,32,32,64,32,1,1
-#tensorBatchNorm3
-#tensorRelu3
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu4
-Conv3,10000,64,16,16,128,64,1,1
-#tensorBatchNorm5
-#tensorRelu5
-#tensorDepthwiseConv3
-#tensorBatchNorm6
-#tensorRelu6
-Conv4,10000,128,16,16,128,128,1,1
-#tensorBatchNorm7
-#tensorRelu7
-#tensorDepthwiseConv4
-#tensorBatchNorm8
-#tensorRelu8
-Conv5,10000,128,8,8,256,128,1,1
-#tensorBatchNorm9
-#tensorRelu9
-#tensorDepthwiseConv5
-#tensorBatchNorm10
-#tensorRelu10
-Conv6,10000,256,8,8,256,256,1,1
-#tensorBatchNorm11
-#tensorRelu11
-#tensorDepthwiseConv6
-#tensorBatchNorm12
-#tensorRelu12
-Conv7,10000,256,4,4,512,256,1,1
-#tensorBatchNorm13
-#tensorRelu13
-#tensorDepthwiseConv7
-#tensorBatchNorm14
-#tensorRelu14
-Conv8,10000,512,4,4,512,512,1,1
-#tensorBatchNorm15
-#tensorRelu15
-#tensorDepthwiseConv8
-#tensorBatchNorm16
-#tensorRelu16
-Conv9,10000,512,4,4,512,512,1,1
-#tensorBatchNorm17
-#tensorRelu17
-#tensorDepthwiseConv9
-#tensorBatchNorm18
-#tensorRelu18
-Conv10,10000,512,4,4,512,512,1,1
-#tensorBatchNorm19
-#tensorRelu19
-#tensorDepthwiseConv10
-#tensorBatchNorm20
-#tensorRelu20
-Conv11,10000,512,4,4,512,512,1,1
-#tensorBatchNorm21
-#tensorRelu21
-#tensorDepthwiseConv11
-#tensorBatchNorm22
-#tensorRelu22
-Conv12,10000,512,4,4,512,512,1,1
-#tensorBatchNorm23
-#tensorRelu23
-#tensorDepthwiseConv12
-#tensorBatchNorm24
-#tensorRelu24
-Conv13,10000,512,2,2,1024,512,1,1
-#tensorBatchNorm25
-#tensorRelu25
-#tensorDepthwiseConv13
-#tensorBatchNorm26
-#tensorRelu26
-Conv14,10000,1024,2,2,1024,1024,1,1
-#tensorBatchNorm27
-#tensorRelu27
-#tensorPooling1
-FC1,10000,1024,1024,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_layers.txt
deleted file mode 100644
index c2a4a29509ad89724905c869ff900f8ecaa5bf8c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_layers.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-NML1
-NML2
-NML3
-NML4
-NML5
-Conv3,10000,32,32,32,64,32,1,1
-NML6
-NML7
-NML8
-NML9
-NML10
-Conv5,10000,64,16,16,128,64,1,1
-NML11
-NML12
-NML13
-NML14
-NML15
-Conv7,10000,128,16,16,128,128,1,1
-NML16
-NML17
-NML18
-NML19
-NML20
-Conv9,10000,128,8,8,256,128,1,1
-NML21
-NML22
-NML23
-NML24
-NML25
-Conv11,10000,256,8,8,256,256,1,1
-NML26
-NML27
-NML28
-NML29
-NML30
-Conv13,10000,256,4,4,512,256,1,1
-NML31
-NML32
-NML33
-NML34
-NML35
-Conv15,10000,512,4,4,512,512,1,1
-NML36
-NML37
-NML38
-NML39
-NML40
-Conv17,10000,512,4,4,512,512,1,1
-NML41
-NML42
-NML43
-NML44
-NML45
-Conv19,10000,512,4,4,512,512,1,1
-NML46
-NML47
-NML48
-NML49
-NML50
-Conv21,10000,512,4,4,512,512,1,1
-NML51
-NML52
-NML53
-NML54
-NML55
-Conv23,10000,512,4,4,512,512,1,1
-NML56
-NML57
-NML58
-NML59
-NML60
-Conv25,10000,512,2,2,1024,512,1,1
-NML61
-NML62
-NML63
-NML64
-NML65
-Conv27,10000,1024,2,2,1024,1024,1,1
-NML66
-NML67
-NML68
-FC1,10000,1024,1024,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_ops.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_ops.txt
deleted file mode 100644
index 8e18f2ec58cddb9ab0251229b1e908b23b71d6bc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/mobilenet_ops.txt
+++ /dev/null
@@ -1,165 +0,0 @@
-#Conv1,1
-Conv1
-#NML1,1
-BatchNorm1
-#NML2,1
-Relu1
-#NML3,1
-Conv2
-#NML4,1
-BatchNorm2
-#NML5,1
-Relu2
-#Conv3,1
-Conv3
-#NML6,1
-BatchNorm3
-#NML7,1
-Relu3
-#NML8,1
-Conv4
-#NML9,1
-BatchNorm4
-#NML10,1
-Relu4
-#Conv5,1
-Conv5
-#NML11,1
-BatchNorm5
-#NML12,1
-Relu5
-#NML13,1
-Conv6
-#NML14,1
-BatchNorm6
-#NML15,1
-Relu6
-#Conv7,1
-Conv7
-#NML16,1
-BatchNorm7
-#NML17,1
-Relu7
-#NML18,1
-Conv8
-#NML19,1
-BatchNorm8
-#NML20,1
-Relu8
-#Conv9,1
-Conv9
-#NML21,1
-BatchNorm9
-#NML22,1
-Relu9
-#NML23,1
-Conv10
-#NML24,1
-BatchNorm10
-#NML25,1
-Relu10
-#Conv11,1
-Conv11
-#NML26,1
-BatchNorm11
-#NML27,1
-Relu11
-#NML28,1
-Conv12
-#NML29,1
-BatchNorm12
-#NML30,1
-Relu12
-#Conv13,1
-Conv13
-#NML31,1
-BatchNorm13
-#NML32,1
-Relu13
-#NML33,1
-Conv14
-#NML34,1
-BatchNorm14
-#NML35,1
-Relu14
-#Conv15,1
-Conv15
-#NML36,1
-BatchNorm15
-#NML37,1
-Relu15
-#NML38,1
-Conv16
-#NML39,1
-BatchNorm16
-#NML40,1
-Relu16
-#Conv17,1
-Conv17
-#NML41,1
-BatchNorm17
-#NML42,1
-Relu17
-#NML43,1
-Conv18
-#NML44,1
-BatchNorm18
-#NML45,1
-Relu18
-#Conv19,1
-Conv19
-#NML46,1
-BatchNorm19
-#NML47,1
-Relu19
-#NML48,1
-Conv20
-#NML49,1
-BatchNorm20
-#NML50,1
-Relu20
-#Conv21,1
-Conv21
-#NML51,1
-BatchNorm21
-#NML52,1
-Relu21
-#NML53,1
-Conv22
-#NML54,1
-BatchNorm22
-#NML55,1
-Relu22
-#Conv23,1
-Conv23
-#NML56,1
-BatchNorm23
-#NML57,1
-Relu23
-#NML58,1
-Conv24
-#NML59,1
-BatchNorm24
-#NML60,1
-Relu24
-#Conv25,1
-Conv25
-#NML61,1
-BatchNorm25
-#NML62,1
-Relu25
-#NML63,1
-Conv26
-#NML64,1
-BatchNorm26
-#NML65,1
-Relu26
-#Conv27,1
-Conv27
-#NML66,1
-BatchNorm27
-#NML67,1
-Relu27
-#NML68,1
-Pool1
-FC1,10000,1024,1024,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/promise_src.cc
deleted file mode 100644
index 146bc640cc4b1e8da65e3e7bb6cb5c7f2a007399..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/promise_src.cc
+++ /dev/null
@@ -1,420 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
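-// Initialize the HPVM tensor runtime; the argument presumably selects the GPU device. 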
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
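-// Start tracking tensor allocations so per-batch memory can be reclaimed between iterations. 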
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
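-// Load the trained weights and the per-layer batch-normalization parameters 
-// (gamma, beta, mean, variance) for all 14 conv and 13 depthwise-conv layers. 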
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -2.196306920051575, 1.347581704139706, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -60.89275047302246, 51.99256916046146, 9); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_5 = tensorRelu(var_4); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.713541553974245, conv2d_2_w, -0.9317721160650253, 1.0774258937835774, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.518589503288269, 6.810842518806449, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.932139402866376, conv2d_3_w, -0.5316544661521911, 0.5753790403604531, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.482631235122681, 3.96730119752885, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.103263397693674, conv2d_4_w, -0.36234098821878435, 0.4076913900375366, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.04261828327179, 3.88677932929993, 9); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.383221302509475, conv2d_5_w, -0.3131200549006462, 0.29357679939270065, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.921469215393066, 4.338679324150087, 9); 
-void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_29 = tensorRelu(var_28); 
-void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 4.316738154411368, conv2d_6_w, -0.23299247801303866, 0.2580290257930756, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.207789947509766, 3.932436970710759, 9); 
-void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_32 = tensorRelu(var_31); 
-void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 5.830408106803901, conv2d_7_w, -0.20233777219057084, 0.18998308175802117, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.298286915779113, 4.848135117530843, 9); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorConvolution(var_38, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-void* var_40 = tensorBatchNorm(var_39, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_41 = tensorRelu(var_40); 
-void* var_42 = ConvLayer_PROMISE(var_41, 0.0, 4.446417809963227, conv2d_8_w, -0.17442735651135444, 0.17695830866694454, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.347910885810852, 3.6144364695549145, 9); 
-void* var_43 = tensorBatchNorm(var_42, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_44 = tensorRelu(var_43); 
-void* var_45 = tensorConvolution(var_44, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-void* var_46 = tensorBatchNorm(var_45, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_47 = tensorRelu(var_46); 
-void* var_48 = ConvLayer_PROMISE(var_47, 0.0, 4.518095604896667, conv2d_9_w, -0.14546796187758446, 0.15256431668996823, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.0287702755928043, 2.9487365779876953, 9); 
-void* var_49 = tensorBatchNorm(var_48, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorConvolution(var_50, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_54 = ConvLayer_PROMISE(var_53, 0.0, 6.348575634956407, conv2d_10_w, -0.13025874522328376, 0.13558243343234128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.2293100805282595, 3.5315046372413645, 9); 
-void* var_55 = tensorBatchNorm(var_54, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_56 = tensorRelu(var_55); 
-void* var_57 = tensorConvolution(var_56, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-void* var_58 = tensorBatchNorm(var_57, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_59 = tensorRelu(var_58); 
-void* var_60 = ConvLayer_PROMISE(var_59, 0.0, 5.221003110408843, conv2d_11_w, -0.11900172759592534, 0.12536374783515936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.038203780174255, 4.004009407043483, 9); 
-void* var_61 = tensorBatchNorm(var_60, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_62 = tensorRelu(var_61); 
-void* var_63 = tensorConvolution(var_62, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_65 = tensorRelu(var_64); 
-void* var_66 = ConvLayer_PROMISE(var_65, 0.0, 5.732498347759442, conv2d_12_w, -0.10839721685647964, 0.11625668607652187, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3111015114784244, 4.462933233261136, 9); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_69 = tensorConvolution(var_68, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-void* var_70 = tensorBatchNorm(var_69, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_71 = tensorRelu(var_70); 
-void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 7.240498211860681, conv2d_13_w, -0.08623744961619377, 0.08859449951350662, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.175431394577027, 6.2043294754027345, 9); 
-void* var_73 = tensorBatchNorm(var_72, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_74 = tensorRelu(var_73); 
-void* var_75 = tensorConvolution(var_74, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-void* var_76 = tensorBatchNorm(var_75, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_77 = tensorRelu(var_76); 
-void* var_78 = ConvLayer_PROMISE(var_77, 0.0, 7.813958834648251, conv2d_14_w, -0.06813025139272214, 0.07002027779817581, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.920566423416137, 2.6442912578582534, 9); 
-void* var_79 = tensorBatchNorm(var_78, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_80 = tensorRelu(var_79); 
-void* var_81 = tensorPooling(var_80,1,2,2,0,0,2,2); 
-void* var_82 = FCLayer_PROMISE(var_81, 0.0, 2.8692066650391013, dense_1_w, -0.22301019695401192, 0.1442659378200768, dense_1_b, -0.1654396, 0.23336112, -1, -12.245949958801269, 23.80532513427739, 9); 
-void* var_83 = tensorSoftmax(var_82); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_83); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
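
The deleted benchmark above repeats one pattern per MobileNet block: a tensorConvolution call whose last argument (the group count) equals the channel count, i.e. a depthwise 3x3 convolution, followed by a 1x1 pointwise convolution (ConvLayer_PROMISE here, with quantization ranges baked into the call). As a minimal standalone sketch of that two-step arithmetic -- my own naive reference code, not anything from this repository, with the layout and semantics inferred from the (N,C,H,W) shapes and group-count arguments in the deleted source -- the pair of loops below computes a stride-1, padding-1 depthwise pass and then a channel-mixing 1x1 pass:

    // Naive CHW depthwise-separable convolution, stride 1, zero padding 1.
    // Illustrative only; assumes the standard depthwise semantics implied
    // by tensorConvolution(..., groups == channels) in the deleted file.
    #include <cstdio>
    #include <vector>

    // Depthwise: one 3x3 filter per channel (w is C x 3 x 3).
    std::vector<float> depthwise3x3(const std::vector<float>& in,
                                    const std::vector<float>& w,
                                    int C, int H, int W) {
      std::vector<float> out(static_cast<size_t>(C) * H * W, 0.0f);
      for (int c = 0; c < C; ++c)
        for (int y = 0; y < H; ++y)
          for (int x = 0; x < W; ++x) {
            float acc = 0.0f;
            for (int ky = -1; ky <= 1; ++ky)
              for (int kx = -1; kx <= 1; ++kx) {
                int iy = y + ky, ix = x + kx;
                if (iy < 0 || iy >= H || ix < 0 || ix >= W) continue;
                acc += in[(c * H + iy) * W + ix] *
                       w[(c * 3 + (ky + 1)) * 3 + (kx + 1)];
              }
            out[(c * H + y) * W + x] = acc;
          }
      return out;
    }

    // Pointwise: a 1x1 convolution mixing C input channels into K output
    // channels -- the role of the conv2d_*_w tensors of shape (K, C, 1, 1).
    std::vector<float> pointwise1x1(const std::vector<float>& in,
                                    const std::vector<float>& w, // K x C
                                    int C, int K, int H, int W) {
      std::vector<float> out(static_cast<size_t>(K) * H * W, 0.0f);
      for (int k = 0; k < K; ++k)
        for (int c = 0; c < C; ++c)
          for (int p = 0; p < H * W; ++p)
            out[k * H * W + p] += w[k * C + c] * in[c * H * W + p];
      return out;
    }

    int main() {
      const int C = 2, K = 3, H = 4, W = 4;
      std::vector<float> x(C * H * W, 1.0f), dw(C * 9, 0.1f), pw(K * C, 0.5f);
      std::vector<float> y =
          pointwise1x1(depthwise3x3(x, dw, C, H, W), pw, C, K, H, W);
      std::printf("out[0] = %f\n", y[0]);
      return 0;
    }

The point of the factorization is cost: the depthwise pass does 9*C multiply-accumulates per pixel and the pointwise pass K*C, versus 9*K*C for a fused 3x3 convolution, which is why the deleted weight files split each block into a depthwise_conv2d_*_w and a conv2d_*_w tensor.
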
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/src.cc
deleted file mode 100644
index 25aec9bde3bc1aac157e2acc368dcddf866e455d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet/src.cc
+++ /dev/null
@@ -1,413 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_9 = tensorRelu(var_8); 
-void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_13 = tensorRelu(var_12); 
-void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_16 = tensorRelu(var_15); 
-void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_28 = tensorRelu(var_27); 
-void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_43 = tensorRelu(var_42); 
-void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_46 = tensorRelu(var_45); 
-void* var_48 = tensorConvolution(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_55 = tensorConvolution(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_57 = tensorRelu(var_56); 
-void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_60 = tensorRelu(var_59); 
-void* var_63 = tensorConvolution(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_65 = tensorRelu(var_64); 
-void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_70 = tensorConvolution(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_72 = tensorRelu(var_71); 
-void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_75 = tensorRelu(var_74); 
-void* var_77 = tensorConvolution(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_79 = tensorRelu(var_78); 
-void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_82 = tensorRelu(var_81); 
-void* var_85 = tensorConvolution(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_87 = tensorRelu(var_86); 
-void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_90 = tensorRelu(var_89); 
-void* var_92 = tensorConvolution(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_94 = tensorRelu(var_93); 
-void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_97 = tensorRelu(var_96); 
-void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-void* var_102 = tensorAdd(var_101, dense_1_b); 
-void* var_103 = tensorSoftmax(var_102); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
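
Every block in the two deleted sources sandwiches its convolution between tensorBatchNorm and tensorRelu, passing per-channel gamma/beta/mean/variance tensors of shape (1, C, 1, 1) and an epsilon of 0.001 at every call site. Assuming tensorBatchNorm applies the standard inference-time transform y = gamma * (x - mean) / sqrt(variance + eps) + beta -- an assumption consistent with the four parameter tensors loaded above, though the runtime's implementation is not shown in this diff -- a minimal standalone sketch of that per-channel computation is:

    // Inference-time batch normalization over a CHW tensor, in place.
    // Sketch under the assumption stated above; not repository code.
    #include <cmath>
    #include <cstdio>
    #include <vector>

    void batchNormInference(std::vector<float>& data, // C x HW, in place
                            const std::vector<float>& gamma,
                            const std::vector<float>& beta,
                            const std::vector<float>& mean,
                            const std::vector<float>& variance,
                            int C, int HW, float eps = 0.001f) {
      for (int c = 0; c < C; ++c) {
        // Fold the four parameters into one scale and shift per channel,
        // so the inner loop is a single fused multiply-add per element.
        float scale = gamma[c] / std::sqrt(variance[c] + eps);
        float shift = beta[c] - scale * mean[c];
        for (int p = 0; p < HW; ++p)
          data[c * HW + p] = scale * data[c * HW + p] + shift;
      }
    }

    int main() {
      std::vector<float> x = {1.f, 2.f, 3.f, 4.f}; // C = 2, HW = 2
      std::vector<float> g = {1.f, 1.f}, b = {0.f, 0.f};
      std::vector<float> m = {1.5f, 3.5f}, v = {0.25f, 0.25f};
      batchNormInference(x, g, b, m, v, 2, 2);
      std::printf("%f %f %f %f\n", x[0], x[1], x[2], x[3]);
      return 0;
    }

Because the statistics are frozen at inference time, the scale/shift folding shown in the inner comment is also why such parameters can often be fused into the preceding convolution's weights entirely; the deleted benchmarks keep them as a separate tensorBatchNorm step instead.
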
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_beta.bin
deleted file mode 100644
index 3a3c16ade936aa34b0a3cfb69fc44dd06e0c596f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_gamma.bin
deleted file mode 100644
index ccf89d86600299d0d7f889df204e3678d6c4acda..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_mean.bin
deleted file mode 100644
index 267d2d8c504beaea608d4570f8b15ea21d453c93..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_variance.bin
deleted file mode 100644
index 6549c8a6b2777ccb8de60c04a2700719da5a1072..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_10_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_beta.bin
deleted file mode 100644
index 540493c9b4226e7ef24b5c4469bade83fe6cea12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_gamma.bin
deleted file mode 100644
index fe4aa175634a2197ad2662454946b220fe8f412a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_mean.bin
deleted file mode 100644
index 026e0da4c1dd464b1587ea6d3b704af9eb51b534..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_variance.bin
deleted file mode 100644
index e04fbf8e32835860394e91c5272e356677f1ebbc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_beta.bin
deleted file mode 100644
index 56800f81bd29046426e2e0d8e0bd2875a19137ed..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_gamma.bin
deleted file mode 100644
index 3ca47080195988f9823ce115eba7c254d3fd7b32..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_mean.bin
deleted file mode 100644
index 396077783c4b4ee418fe85a0a9558c21475b3747..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_variance.bin
deleted file mode 100644
index 8121706f4841afb1ced7868008b3169aacd9ccf8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_beta.bin
deleted file mode 100644
index 6003e158bb435635051a9cbb53109af2141c8a01..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_gamma.bin
deleted file mode 100644
index 7e3aae0ee1a8ed6c49b95cfb7ccf03a3b189fb12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_mean.bin
deleted file mode 100644
index 891f3cf20330d627ff066e80c9cc3b3c34fae9e3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_variance.bin
deleted file mode 100644
index 6275ad1098d147c4d570b257a13027c216bb8b07..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_beta.bin
deleted file mode 100644
index 7d7935135ac902a0bd732cddabdaa4a253e3bd36..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_gamma.bin
deleted file mode 100644
index a2335fae1f8e182353cccaef7e60ff597d4dbfcb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_mean.bin
deleted file mode 100644
index 06602d452ecf896c293c0fa970d8d5729b739533..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_variance.bin
deleted file mode 100644
index e6b98b0c3845ad21bdfeef06a8e059844194f1ae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_14_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_beta.bin
deleted file mode 100644
index 9ec424c2f3a14108373f79303a157de7442232d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_gamma.bin
deleted file mode 100644
index 718cb58ab279836bf65072301994a7cf390e0482..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_mean.bin
deleted file mode 100644
index 5a4a4f4623acd79158abeee5a49063a97a6d2a8b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_variance.bin
deleted file mode 100644
index a4ae2faa5e3ea8971f99c6439002a4df4ad88a95..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_15_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_beta.bin
deleted file mode 100644
index 28161105d5e29c1a12d5d9304ac8fff7cf0e255f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_gamma.bin
deleted file mode 100644
index c00ee7935d55b8f91b395543df2876916b143e40..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_mean.bin
deleted file mode 100644
index 2b9433a40e480f5f82a7642fe702d59a915d26ae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_variance.bin
deleted file mode 100644
index dc674269c0e3021bcfb44ea119dfa3d269af526b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_16_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_beta.bin
deleted file mode 100644
index 755e35fb72c7434db1e231b969c4c432161e9bca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_gamma.bin
deleted file mode 100644
index 27c4601a2753b9ae98e00389a5abf9459b28b722..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_mean.bin
deleted file mode 100644
index 5170e293e0570e642df82e890b7735b3292e4f22..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_variance.bin
deleted file mode 100644
index 38894be707a8b7a29e5001976c303aeed943fe87..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_17_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_beta.bin
deleted file mode 100644
index f824f45ea5ecf73d8fefdf5420c95bcaaf953883..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_gamma.bin
deleted file mode 100644
index 97ac2066edeef07e6ef3b911e4d35139118673c7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_mean.bin
deleted file mode 100644
index ddbc637ce9d8dd94dba801181a8908c0fd9fe565..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_variance.bin
deleted file mode 100644
index 5f17074599c2685aaf7319abd785dde80f14c20e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_18_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_beta.bin
deleted file mode 100644
index 9e088f20d8ed2f91f1e488a0eee2ea13bd460727..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_gamma.bin
deleted file mode 100644
index 9bb04491903531762bf64c64ef4fe8a6ce0e5567..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_mean.bin
deleted file mode 100644
index b42f332db83f4016574feafa4dc3283cb2511266..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_variance.bin
deleted file mode 100644
index a055f9c6b7362a16379c9f10c50dcfe902a3c629..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_19_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_beta.bin
deleted file mode 100644
index 97bd676c0fb44e91065ccb7ca7054b98f62d3a3b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_gamma.bin
deleted file mode 100644
index f3d303789aba659a1920c2f263ecfb2972627d10..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_mean.bin
deleted file mode 100644
index f7f4cf323938b1c9938278c7ff412d8bbaea0d54..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_variance.bin
deleted file mode 100644
index 147e253560b305478e5695bbbf29edad0196efd7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_1_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_beta.bin
deleted file mode 100644
index 22de74781e2d5bd8aff0970a3dd772a3f7af8b43..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_gamma.bin
deleted file mode 100644
index f6a45f9321a72c3b75577ed2770f8172068adb14..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_mean.bin
deleted file mode 100644
index fa9745a01e28154ce05dc3fe03993231b2eaa02f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_variance.bin
deleted file mode 100644
index fe73ba3686237a90d0769fc263b75b72056b9c5c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_20_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_beta.bin
deleted file mode 100644
index 9e96170952186e9b86f728cb5af530431f9e7522..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_gamma.bin
deleted file mode 100644
index cef59f6e3216496060f0d26ecdb72160d41cbca0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_mean.bin
deleted file mode 100644
index 647395df46d9c20cd169dff9f5c77c2757575d85..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_variance.bin
deleted file mode 100644
index f87f91130fb9212ccbd5ec53ad220703b94f1862..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_21_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_beta.bin
deleted file mode 100644
index 8c23a1ba8a470c6244a5deaf76a0924953ced6a3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_gamma.bin
deleted file mode 100644
index b33cc77d691731a67fa23e8f951c85e0802bfa92..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_mean.bin
deleted file mode 100644
index 5a2a391ae13bedc847d89f2093f58312cf49d575..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_variance.bin
deleted file mode 100644
index 15bfdce770b2483159d00c36f2baed06031828b7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_22_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_beta.bin
deleted file mode 100644
index 5596ded73379734bd77e452ba94f146c7bedcdc7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_gamma.bin
deleted file mode 100644
index 360a0c778ec76fc5eb7f302dc9ac20d7b73f6e27..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_mean.bin
deleted file mode 100644
index 9c06345315d6c35ebfc73bcb8331cf3d0e0036dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_variance.bin
deleted file mode 100644
index 6f37b1025f8a92b79518a9e040a8818d21cf69d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_23_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_beta.bin
deleted file mode 100644
index e53e6a502e79ee372ba7137366a18ced92f2ef47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_gamma.bin
deleted file mode 100644
index 4563fede5177008f93f23041043b66ac1f6dd8b2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_mean.bin
deleted file mode 100644
index 7fdd696f0849675b36af044d77bc2827d550a512..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_variance.bin
deleted file mode 100644
index 84ce37015f205d936c444a4c04ff4127234516b1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_24_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_beta.bin
deleted file mode 100644
index e7b25470ef2619504ef1d3e61fd92725352cfbd2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_gamma.bin
deleted file mode 100644
index e5a8910761e45753683853869fb0a059f56bd739..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_mean.bin
deleted file mode 100644
index 43145b4788f603530808cac9d084f2d4e3271497..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_variance.bin
deleted file mode 100644
index c7aecd55851aa77d48c5f701aa087ebd984e1601..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_25_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_beta.bin
deleted file mode 100644
index 696f0d3025d2184f744393996aa3f16d4173b9c0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_gamma.bin
deleted file mode 100644
index 45c439d6b134fcf84d0f492bd6cf0c361392767a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_mean.bin
deleted file mode 100644
index eb018ac762e96f39534b9bf72d4b76175e28b506..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_variance.bin
deleted file mode 100644
index a07048b126c51d0bdfeacd77232b6beab78a22d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_26_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_beta.bin
deleted file mode 100644
index 705aa5c7956f6c703889591f8581b5a96b800a39..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_gamma.bin
deleted file mode 100644
index 28ec9b0abc670976b9d125e8f36709248d3bc801..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_mean.bin
deleted file mode 100644
index 057fe484088453b4b85ef8c531e186e4d5703019..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_variance.bin
deleted file mode 100644
index 1c9d93590a2becf151b3ae9a3730692857beabf5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_27_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_beta.bin
deleted file mode 100644
index 046e98c72d249342511a1c5ada63043533e5513f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_gamma.bin
deleted file mode 100644
index 4b746a310b395b5b3b36a5846ae596aba6acb71a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_mean.bin
deleted file mode 100644
index f160cdf014a977fdf93f4fa03a1da8fc50741724..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_variance.bin
deleted file mode 100644
index 59c0c4e6f02182f5c8da47c061fda25db7eda6f7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_beta.bin
deleted file mode 100644
index 4be5026221b3e708b6f93e4a4b61256c7f114b3f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_gamma.bin
deleted file mode 100644
index 8a81a868f461b8342cafde739ea216e47e6d5c42..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_mean.bin
deleted file mode 100644
index ed107d20b940ff9bdb8a02d75a61508a4557226b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_variance.bin
deleted file mode 100644
index 9182caf4719d0daa967d8a62b3a05d7b8b1e081f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_3_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_beta.bin
deleted file mode 100644
index c0cb0b714bfc40d21a95e558a7e9371711240aa9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_gamma.bin
deleted file mode 100644
index 59f96f317915df78a8c67b328569b939a112a417..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_mean.bin
deleted file mode 100644
index 9b0d73621e993e34be9696ca963bc6c1c3dab7cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_variance.bin
deleted file mode 100644
index f1458901b75ae27101b96f4995d3b636f319c877..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_beta.bin
deleted file mode 100644
index c92948e612111496883f017c07d803d0d8aacf4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_gamma.bin
deleted file mode 100644
index 361481684c205ab17d58e527d1acbce391339130..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_mean.bin
deleted file mode 100644
index 09fb4f8c9bb2f013cbc62c5861dd46fd2ac8d200..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_variance.bin
deleted file mode 100644
index 7971e66d67a3efee3b30c333f2cad8f271221217..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_beta.bin
deleted file mode 100644
index bc7581546520792d894d3375c0c4557ce7ea906b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_gamma.bin
deleted file mode 100644
index b48836f2c0bc9dd05e4fc9a24d545120e3365f02..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_mean.bin
deleted file mode 100644
index 0dbadb684a2e9e6e9eaa1bde81630ee1005a4ae4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_variance.bin
deleted file mode 100644
index 4669fcb9ea5bb21c594cd9e7b19b435b3cf771da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_beta.bin
deleted file mode 100644
index 4ade0b3ac8fcb60caa8ed69f782fd0c5ecd7a61d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_gamma.bin
deleted file mode 100644
index 4b32a8a1046b45c453d70756cf05c63d0f07b15e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_mean.bin
deleted file mode 100644
index a53edd2b88c2d60b8e432d2c5b6c0fb37167c3cd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_variance.bin
deleted file mode 100644
index dd0456b69725d1d2d4cfb983c0f22144b323b892..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_beta.bin
deleted file mode 100644
index 2c60da8cac8bbfd9948d134faf10c128aef2cfbc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_gamma.bin
deleted file mode 100644
index bc3e3a42252f6730172fb898298054316c348780..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_mean.bin
deleted file mode 100644
index 5f7cf3dbd74c06b121aed5e2c99f782e1756cce5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_variance.bin
deleted file mode 100644
index 15f85baa9a203a7befc166737eb734e30b2c23e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_beta.bin
deleted file mode 100644
index 1182160446288ff96739d0bad78c14832a4534b3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_gamma.bin
deleted file mode 100644
index 5d59ed002e63c0ec38a4137474b80c80884e1e13..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_mean.bin
deleted file mode 100644
index 54c1c6db35512fe9c0dc3267b4efb1163cdcaf5f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_variance.bin
deleted file mode 100644
index 380d1141f6db6540a8857c7296571c32917530b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_10_w.bin
deleted file mode 100644
index be27ee1f468edb2d249344e76168608d477943e9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_11_w.bin
deleted file mode 100644
index 5ee50139a67d44b0fbd3a10982e4a7d8df8cf881..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_12_w.bin
deleted file mode 100644
index 2a6b3b16a213a408bf0ddfa1c33af78d11c750c9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_13_w.bin
deleted file mode 100644
index ab04519349aae65efec45b481eb2008b61ef92b4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_14_w.bin
deleted file mode 100644
index 3ff46e2f02a916df79c4fb6d942507b0f0480425..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_1_w.bin
deleted file mode 100644
index beae71db696fa1c94d2d1aa414e5a00988339aa0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_2_w.bin
deleted file mode 100644
index 70b575340869287d376bf7446915a12c19477ff2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_3_w.bin
deleted file mode 100644
index 1ba997c0e2f10b7cade710b0944c7a4e4bdc33c3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_4_w.bin
deleted file mode 100644
index bb3eb3daed943a5569ba8e495ad20e07ee44bc42..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_5_w.bin
deleted file mode 100644
index c1a48e68adc94b86d1b8b1d9fe5694ba736b53b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_6_w.bin
deleted file mode 100644
index 071ab1b870f25323a55ea8a3285cbe07d63acc86..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_7_w.bin
deleted file mode 100644
index c5d7ad86c15e5779b84c9e1484ab532e98d64d2e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_8_w.bin
deleted file mode 100644
index af71a49608774244f8d88a9a3bd4801da45bb450..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_9_w.bin
deleted file mode 100644
index 7b493fb762b937d29e933eaea5eb8aadd2a654c5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_b.bin
deleted file mode 100644
index b97d8e0ae6500627fcf62bc0e4c457bac447317f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_w.bin
deleted file mode 100644
index a3b0cff6a2171c865b11c25dec250405f9461f70..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_10_w.bin
deleted file mode 100644
index 08228dcb28a43558dcacfa7b266d3637138820ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_11_w.bin
deleted file mode 100644
index 23a8d94cb02f24db23b0df0d91e75ebaf44a6340..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_12_w.bin
deleted file mode 100644
index 33443ad99f79e2bffc561feb3991834b0b18bff1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_13_w.bin
deleted file mode 100644
index 3220f00786bc00db95b25607d8227d7e0b609e84..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_1_w.bin
deleted file mode 100644
index fa5759f7995d75798612674c26f4e08c7d63a63f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_2_w.bin
deleted file mode 100644
index c09ac42fd8f8d804d783964a735336759edce3ea..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_3_w.bin
deleted file mode 100644
index a998b3154ea9a9b210dc6eb943668237558ea129..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_4_w.bin
deleted file mode 100644
index 65ded0e0f26acf54ec10621b3f89857c574ba565..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_5_w.bin
deleted file mode 100644
index e4552fc09ed42accc926a9d5592f37dd1d6a5e10..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_6_w.bin
deleted file mode 100644
index 43731484ba19242c3248f8b4586e3f314e8440d6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_7_w.bin
deleted file mode 100644
index 5db95dbb276a64563a8fbb4db3bfa17eaa49fe0b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_8_w.bin
deleted file mode 100644
index 7c3d9be67d904f38048d601d9d710a7f48f116f2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_9_w.bin
deleted file mode 100644
index 91a5954f2b2bef0fd1131195559e53ffea90d316..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/depthwise_conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/input.bin
deleted file mode 100644
index 7a6fbc28f5a947a90863278a5249303f9f52741b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/src.cc
deleted file mode 100644
index aad069dc8cc5fb8613987e78fc5313c23141f6de..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_data/src.cc
+++ /dev/null
@@ -1,413 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-  llvm_hpvm_initTensorRt(1); 
-
-
-  std::string dir_prefix = std::string("data/mobilenet_hpvm_5/"); 
-  std::string input_path =  dir_prefix + std::string("input.bin"); 
-  std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-  std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-  void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-  std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-  void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-  void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-  void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-  void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-  std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-  void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-  std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-  void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-  void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-  void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-  std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-  void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-  std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-  void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-  std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-  void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-  void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-  void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-  void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-  std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-  void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-  std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-  void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-  void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-  void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-  std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-  void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-  std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-  void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-  std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-  void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-  void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-  void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-  void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-  void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-  void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-  void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-  void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-  void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-  void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-  std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-  void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-  void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-  void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-  void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-  std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-  void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-  std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-  void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-  void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-  void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-  std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-  void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-  std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-  void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-  std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-  void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-  void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-  void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-  void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-  void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-  void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-  void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-  void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-  void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-  void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-  std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-  void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-  void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-  void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-  void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-  std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-  void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-  std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-  void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-  void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-  void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-  std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-  void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-  std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-  void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-  std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-  void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-  void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-  void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-  void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-  void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-  void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-  void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-  void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-  void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-  void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-  void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-  void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-  void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-  void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-  void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-  void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-  void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-  void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-  void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-  void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-  void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-  void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-  void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-  void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-  void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-  void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-  void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-  void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-  void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-  void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-  void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-  void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-  void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-  void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-  void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-  void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-  void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-  void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-  void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-  void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-  void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-  void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-  void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-  void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-  void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-  void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-  void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-  void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-  void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-  void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-  std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-  void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-  void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-  void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-  void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-  std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-  void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-  std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-  void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-  void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-  void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-  std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-  void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-  std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-  void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-  std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-  void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-  void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-  void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-  void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-  void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-  std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-  void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-  void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-  void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-  void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-  void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-  std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-  void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-  void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-  void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-  std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-  void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-  std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-  void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-  std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-  void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-  startMemTracking(); 
-
-  int test_input_size = 2000; 
-  int batch_size = 2000; 
-  int batch_count = test_input_size / batch_size; 
-  float final_accuracy = 0.0; 
-
-  for(int i = 0; i < batch_count; i++){ 
-
-    int start = i * batch_size; 
-    int end = (i + 1) * batch_size; 
-
-    void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-    void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-    void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-    void* var_2 = tensorRelu(var_1); 
-    void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-    void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-    void* var_5 = tensorRelu(var_4); 
-    void* var_6 = tensorConvolution(var_5, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-    void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-    void* var_8 = tensorRelu(var_7); 
-    void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-    void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-    void* var_11 = tensorRelu(var_10); 
-    void* var_12 = tensorConvolution(var_11, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-    void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-    void* var_14 = tensorRelu(var_13); 
-    void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-    void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-    void* var_17 = tensorRelu(var_16); 
-    void* var_18 = tensorConvolution(var_17, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-    void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-    void* var_20 = tensorRelu(var_19); 
-    void* var_22 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-    void* var_23 = tensorBatchNorm(var_22, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-    void* var_24 = tensorRelu(var_23); 
-    void* var_25 = tensorConvolution(var_24, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-    void* var_26 = tensorBatchNorm(var_25, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-    void* var_27 = tensorRelu(var_26); 
-    void* var_28 = tensorConvolution(var_27, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-    void* var_29 = tensorBatchNorm(var_28, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-    void* var_30 = tensorRelu(var_29); 
-    void* var_31 = tensorConvolution(var_30, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-    void* var_32 = tensorBatchNorm(var_31, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-    void* var_33 = tensorRelu(var_32); 
-    void* var_35 = tensorConvolution(var_33, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-    void* var_36 = tensorBatchNorm(var_35, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-    void* var_37 = tensorRelu(var_36); 
-    void* var_38 = tensorConvolution(var_37, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-    void* var_39 = tensorBatchNorm(var_38, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-    void* var_40 = tensorRelu(var_39); 
-    void* var_41 = tensorConvolution(var_40, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-    void* var_42 = tensorBatchNorm(var_41, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-    void* var_43 = tensorRelu(var_42); 
-    void* var_44 = tensorConvolution(var_43, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-    void* var_45 = tensorBatchNorm(var_44, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-    void* var_46 = tensorRelu(var_45); 
-    void* var_47 = tensorConvolution(var_46, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-    void* var_48 = tensorBatchNorm(var_47, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-    void* var_49 = tensorRelu(var_48); 
-    void* var_50 = tensorConvolution(var_49, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-    void* var_51 = tensorBatchNorm(var_50, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-    void* var_52 = tensorRelu(var_51); 
-    void* var_54 = tensorConvolution(var_52, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-    void* var_55 = tensorBatchNorm(var_54, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-    void* var_56 = tensorRelu(var_55); 
-    void* var_57 = tensorConvolution(var_56, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-    void* var_58 = tensorBatchNorm(var_57, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-    void* var_59 = tensorRelu(var_58); 
-    void* var_60 = tensorConvolution(var_59, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-    void* var_61 = tensorBatchNorm(var_60, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-    void* var_62 = tensorRelu(var_61); 
-    void* var_63 = tensorConvolution(var_62, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-    void* var_64 = tensorBatchNorm(var_63, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-    void* var_65 = tensorRelu(var_64); 
-    void* var_66 = tensorConvolution(var_65, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-    void* var_67 = tensorBatchNorm(var_66, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-    void* var_68 = tensorRelu(var_67); 
-    void* var_69 = tensorConvolution(var_68, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-    void* var_70 = tensorBatchNorm(var_69, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-    void* var_71 = tensorRelu(var_70); 
-    void* var_73 = tensorConvolution(var_71, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-    void* var_74 = tensorBatchNorm(var_73, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-    void* var_75 = tensorRelu(var_74); 
-    void* var_76 = tensorConvolution(var_75, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-    void* var_77 = tensorBatchNorm(var_76, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-    void* var_78 = tensorRelu(var_77); 
-    void* var_79 = tensorConvolution(var_78, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-    void* var_80 = tensorBatchNorm(var_79, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-    void* var_81 = tensorRelu(var_80); 
-    void* var_82 = tensorConvolution(var_81, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-    void* var_83 = tensorBatchNorm(var_82, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-    void* var_84 = tensorRelu(var_83); 
-    void* var_86 = tensorPooling(var_84,1,2,2,0,0,2,2); 
-    void* var_88 = tensorGemmGPU(var_86, dense_1_w); 
-    void* var_89 = tensorAdd(var_88, dense_1_b); 
-    void* var_90 = tensorSoftmax(var_89); 
-
-    uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-    float accuracy = computeAccuracy2(labels, batch_size, var_90); 
-    final_accuracy += accuracy; 
-    freeBatchMemory(); 
- 
-  }
-
-  final_accuracy = final_accuracy / batch_count; 
-  dumpFinalAccuracy(final_accuracy); 
-
-
-  llvm_hpvm_cleanupTensorRt(); 
-
-  return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/approxhpvm_src.cc
deleted file mode 100644
index 5089eb912bcb5335c96c04f6d98f5d17ab761c72..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/approxhpvm_src.cc
+++ /dev/null
@@ -1,2400 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 32); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 64); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_43_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_44_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_45_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_46_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_47_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_48_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_49_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_50_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_51_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_52_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_53_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_54_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_55_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_56_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_57_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_58_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_59_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_60_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_61_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_62_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_63_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_64_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_65_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_66_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_67_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_68_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_69_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 512); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_70_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_71_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_72_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_73_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_74_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_75_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 1024); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_76_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_77_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_78_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_79_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_80_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_81_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_avg(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_82_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_83_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_84_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* batch_normalization_1_gamma, size_t batch_normalization_1_gamma_bytes, 
-	  void* batch_normalization_1_beta, size_t batch_normalization_1_beta_bytes, 
-	  void* batch_normalization_1_mean, size_t batch_normalization_1_mean_bytes, 
-	  void* batch_normalization_1_variance, size_t batch_normalization_1_variance_bytes, 
-	  void* depthwise_conv2d_1_w, size_t depthwise_conv2d_1_w_bytes, 
-	  void* batch_normalization_2_gamma, size_t batch_normalization_2_gamma_bytes, 
-	  void* batch_normalization_2_beta, size_t batch_normalization_2_beta_bytes, 
-	  void* batch_normalization_2_mean, size_t batch_normalization_2_mean_bytes, 
-	  void* batch_normalization_2_variance, size_t batch_normalization_2_variance_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* batch_normalization_3_gamma, size_t batch_normalization_3_gamma_bytes, 
-	  void* batch_normalization_3_beta, size_t batch_normalization_3_beta_bytes, 
-	  void* batch_normalization_3_mean, size_t batch_normalization_3_mean_bytes, 
-	  void* batch_normalization_3_variance, size_t batch_normalization_3_variance_bytes, 
-	  void* depthwise_conv2d_2_w, size_t depthwise_conv2d_2_w_bytes, 
-	  void* batch_normalization_4_gamma, size_t batch_normalization_4_gamma_bytes, 
-	  void* batch_normalization_4_beta, size_t batch_normalization_4_beta_bytes, 
-	  void* batch_normalization_4_mean, size_t batch_normalization_4_mean_bytes, 
-	  void* batch_normalization_4_variance, size_t batch_normalization_4_variance_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* batch_normalization_5_gamma, size_t batch_normalization_5_gamma_bytes, 
-	  void* batch_normalization_5_beta, size_t batch_normalization_5_beta_bytes, 
-	  void* batch_normalization_5_mean, size_t batch_normalization_5_mean_bytes, 
-	  void* batch_normalization_5_variance, size_t batch_normalization_5_variance_bytes, 
-	  void* depthwise_conv2d_3_w, size_t depthwise_conv2d_3_w_bytes, 
-	  void* batch_normalization_6_gamma, size_t batch_normalization_6_gamma_bytes, 
-	  void* batch_normalization_6_beta, size_t batch_normalization_6_beta_bytes, 
-	  void* batch_normalization_6_mean, size_t batch_normalization_6_mean_bytes, 
-	  void* batch_normalization_6_variance, size_t batch_normalization_6_variance_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* batch_normalization_7_gamma, size_t batch_normalization_7_gamma_bytes, 
-	  void* batch_normalization_7_beta, size_t batch_normalization_7_beta_bytes, 
-	  void* batch_normalization_7_mean, size_t batch_normalization_7_mean_bytes, 
-	  void* batch_normalization_7_variance, size_t batch_normalization_7_variance_bytes, 
-	  void* depthwise_conv2d_4_w, size_t depthwise_conv2d_4_w_bytes, 
-	  void* batch_normalization_8_gamma, size_t batch_normalization_8_gamma_bytes, 
-	  void* batch_normalization_8_beta, size_t batch_normalization_8_beta_bytes, 
-	  void* batch_normalization_8_mean, size_t batch_normalization_8_mean_bytes, 
-	  void* batch_normalization_8_variance, size_t batch_normalization_8_variance_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* batch_normalization_9_gamma, size_t batch_normalization_9_gamma_bytes, 
-	  void* batch_normalization_9_beta, size_t batch_normalization_9_beta_bytes, 
-	  void* batch_normalization_9_mean, size_t batch_normalization_9_mean_bytes, 
-	  void* batch_normalization_9_variance, size_t batch_normalization_9_variance_bytes, 
-	  void* depthwise_conv2d_5_w, size_t depthwise_conv2d_5_w_bytes, 
-	  void* batch_normalization_10_gamma, size_t batch_normalization_10_gamma_bytes, 
-	  void* batch_normalization_10_beta, size_t batch_normalization_10_beta_bytes, 
-	  void* batch_normalization_10_mean, size_t batch_normalization_10_mean_bytes, 
-	  void* batch_normalization_10_variance, size_t batch_normalization_10_variance_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* batch_normalization_11_gamma, size_t batch_normalization_11_gamma_bytes, 
-	  void* batch_normalization_11_beta, size_t batch_normalization_11_beta_bytes, 
-	  void* batch_normalization_11_mean, size_t batch_normalization_11_mean_bytes, 
-	  void* batch_normalization_11_variance, size_t batch_normalization_11_variance_bytes, 
-	  void* depthwise_conv2d_6_w, size_t depthwise_conv2d_6_w_bytes, 
-	  void* batch_normalization_12_gamma, size_t batch_normalization_12_gamma_bytes, 
-	  void* batch_normalization_12_beta, size_t batch_normalization_12_beta_bytes, 
-	  void* batch_normalization_12_mean, size_t batch_normalization_12_mean_bytes, 
-	  void* batch_normalization_12_variance, size_t batch_normalization_12_variance_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* batch_normalization_13_gamma, size_t batch_normalization_13_gamma_bytes, 
-	  void* batch_normalization_13_beta, size_t batch_normalization_13_beta_bytes, 
-	  void* batch_normalization_13_mean, size_t batch_normalization_13_mean_bytes, 
-	  void* batch_normalization_13_variance, size_t batch_normalization_13_variance_bytes, 
-	  void* depthwise_conv2d_7_w, size_t depthwise_conv2d_7_w_bytes, 
-	  void* batch_normalization_14_gamma, size_t batch_normalization_14_gamma_bytes, 
-	  void* batch_normalization_14_beta, size_t batch_normalization_14_beta_bytes, 
-	  void* batch_normalization_14_mean, size_t batch_normalization_14_mean_bytes, 
-	  void* batch_normalization_14_variance, size_t batch_normalization_14_variance_bytes, 
-	  void* conv2d_8_w, size_t conv2d_8_w_bytes, 
-	  void* batch_normalization_15_gamma, size_t batch_normalization_15_gamma_bytes, 
-	  void* batch_normalization_15_beta, size_t batch_normalization_15_beta_bytes, 
-	  void* batch_normalization_15_mean, size_t batch_normalization_15_mean_bytes, 
-	  void* batch_normalization_15_variance, size_t batch_normalization_15_variance_bytes, 
-	  void* depthwise_conv2d_8_w, size_t depthwise_conv2d_8_w_bytes, 
-	  void* batch_normalization_16_gamma, size_t batch_normalization_16_gamma_bytes, 
-	  void* batch_normalization_16_beta, size_t batch_normalization_16_beta_bytes, 
-	  void* batch_normalization_16_mean, size_t batch_normalization_16_mean_bytes, 
-	  void* batch_normalization_16_variance, size_t batch_normalization_16_variance_bytes, 
-	  void* conv2d_9_w, size_t conv2d_9_w_bytes, 
-	  void* batch_normalization_17_gamma, size_t batch_normalization_17_gamma_bytes, 
-	  void* batch_normalization_17_beta, size_t batch_normalization_17_beta_bytes, 
-	  void* batch_normalization_17_mean, size_t batch_normalization_17_mean_bytes, 
-	  void* batch_normalization_17_variance, size_t batch_normalization_17_variance_bytes, 
-	  void* depthwise_conv2d_9_w, size_t depthwise_conv2d_9_w_bytes, 
-	  void* batch_normalization_18_gamma, size_t batch_normalization_18_gamma_bytes, 
-	  void* batch_normalization_18_beta, size_t batch_normalization_18_beta_bytes, 
-	  void* batch_normalization_18_mean, size_t batch_normalization_18_mean_bytes, 
-	  void* batch_normalization_18_variance, size_t batch_normalization_18_variance_bytes, 
-	  void* conv2d_10_w, size_t conv2d_10_w_bytes, 
-	  void* batch_normalization_19_gamma, size_t batch_normalization_19_gamma_bytes, 
-	  void* batch_normalization_19_beta, size_t batch_normalization_19_beta_bytes, 
-	  void* batch_normalization_19_mean, size_t batch_normalization_19_mean_bytes, 
-	  void* batch_normalization_19_variance, size_t batch_normalization_19_variance_bytes, 
-	  void* depthwise_conv2d_10_w, size_t depthwise_conv2d_10_w_bytes, 
-	  void* batch_normalization_20_gamma, size_t batch_normalization_20_gamma_bytes, 
-	  void* batch_normalization_20_beta, size_t batch_normalization_20_beta_bytes, 
-	  void* batch_normalization_20_mean, size_t batch_normalization_20_mean_bytes, 
-	  void* batch_normalization_20_variance, size_t batch_normalization_20_variance_bytes, 
-	  void* conv2d_11_w, size_t conv2d_11_w_bytes, 
-	  void* batch_normalization_21_gamma, size_t batch_normalization_21_gamma_bytes, 
-	  void* batch_normalization_21_beta, size_t batch_normalization_21_beta_bytes, 
-	  void* batch_normalization_21_mean, size_t batch_normalization_21_mean_bytes, 
-	  void* batch_normalization_21_variance, size_t batch_normalization_21_variance_bytes, 
-	  void* depthwise_conv2d_11_w, size_t depthwise_conv2d_11_w_bytes, 
-	  void* batch_normalization_22_gamma, size_t batch_normalization_22_gamma_bytes, 
-	  void* batch_normalization_22_beta, size_t batch_normalization_22_beta_bytes, 
-	  void* batch_normalization_22_mean, size_t batch_normalization_22_mean_bytes, 
-	  void* batch_normalization_22_variance, size_t batch_normalization_22_variance_bytes, 
-	  void* conv2d_12_w, size_t conv2d_12_w_bytes, 
-	  void* batch_normalization_23_gamma, size_t batch_normalization_23_gamma_bytes, 
-	  void* batch_normalization_23_beta, size_t batch_normalization_23_beta_bytes, 
-	  void* batch_normalization_23_mean, size_t batch_normalization_23_mean_bytes, 
-	  void* batch_normalization_23_variance, size_t batch_normalization_23_variance_bytes, 
-	  void* depthwise_conv2d_12_w, size_t depthwise_conv2d_12_w_bytes, 
-	  void* batch_normalization_24_gamma, size_t batch_normalization_24_gamma_bytes, 
-	  void* batch_normalization_24_beta, size_t batch_normalization_24_beta_bytes, 
-	  void* batch_normalization_24_mean, size_t batch_normalization_24_mean_bytes, 
-	  void* batch_normalization_24_variance, size_t batch_normalization_24_variance_bytes, 
-	  void* conv2d_13_w, size_t conv2d_13_w_bytes, 
-	  void* batch_normalization_25_gamma, size_t batch_normalization_25_gamma_bytes, 
-	  void* batch_normalization_25_beta, size_t batch_normalization_25_beta_bytes, 
-	  void* batch_normalization_25_mean, size_t batch_normalization_25_mean_bytes, 
-	  void* batch_normalization_25_variance, size_t batch_normalization_25_variance_bytes, 
-	  void* depthwise_conv2d_13_w, size_t depthwise_conv2d_13_w_bytes, 
-	  void* batch_normalization_26_gamma, size_t batch_normalization_26_gamma_bytes, 
-	  void* batch_normalization_26_beta, size_t batch_normalization_26_beta_bytes, 
-	  void* batch_normalization_26_mean, size_t batch_normalization_26_mean_bytes, 
-	  void* batch_normalization_26_variance, size_t batch_normalization_26_variance_bytes, 
-	  void* conv2d_14_w, size_t conv2d_14_w_bytes, 
-	  void* batch_normalization_27_gamma, size_t batch_normalization_27_gamma_bytes, 
-	  void* batch_normalization_27_beta, size_t batch_normalization_27_beta_bytes, 
-	  void* batch_normalization_27_mean, size_t batch_normalization_27_mean_bytes, 
-	  void* batch_normalization_27_variance, size_t batch_normalization_27_variance_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(138, input, conv2d_1_w, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, depthwise_conv2d_1_w, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, conv2d_2_w, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, depthwise_conv2d_2_w, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, conv2d_3_w, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, depthwise_conv2d_3_w, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, conv2d_4_w, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, depthwise_conv2d_4_w, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, conv2d_5_w, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, depthwise_conv2d_5_w, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, conv2d_6_w, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, depthwise_conv2d_6_w, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, conv2d_7_w, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, depthwise_conv2d_7_w, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, conv2d_8_w, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, depthwise_conv2d_8_w, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, conv2d_9_w, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, depthwise_conv2d_9_w, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, conv2d_10_w, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, depthwise_conv2d_10_w, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, conv2d_11_w, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, depthwise_conv2d_11_w, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, conv2d_12_w, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, depthwise_conv2d_12_w, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, conv2d_13_w, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, depthwise_conv2d_13_w, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, conv2d_14_w, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-  __visc__bindIn(var_1, 6, 4, 0); 
-  __visc__bindIn(var_1, 7, 5, 0); 
-  __visc__bindIn(var_1, 8, 6, 0); 
-  __visc__bindIn(var_1, 9, 7, 0); 
-  __visc__bindIn(var_1, 10, 8, 0); 
-  __visc__bindIn(var_1, 11, 9, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 12, 2, 0); 
-  __visc__bindIn(var_3, 13, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 14, 2, 0); 
-  __visc__bindIn(var_4, 15, 3, 0); 
-  __visc__bindIn(var_4, 16, 4, 0); 
-  __visc__bindIn(var_4, 17, 5, 0); 
-  __visc__bindIn(var_4, 18, 6, 0); 
-  __visc__bindIn(var_4, 19, 7, 0); 
-  __visc__bindIn(var_4, 20, 8, 0); 
-  __visc__bindIn(var_4, 21, 9, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-  __visc__bindIn(var_6, 22, 2, 0); 
-  __visc__bindIn(var_6, 23, 3, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 24, 2, 0); 
-  __visc__bindIn(var_7, 25, 3, 0); 
-  __visc__bindIn(var_7, 26, 4, 0); 
-  __visc__bindIn(var_7, 27, 5, 0); 
-  __visc__bindIn(var_7, 28, 6, 0); 
-  __visc__bindIn(var_7, 29, 7, 0); 
-  __visc__bindIn(var_7, 30, 8, 0); 
-  __visc__bindIn(var_7, 31, 9, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 32, 2, 0); 
-  __visc__bindIn(var_9, 33, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 34, 2, 0); 
-  __visc__bindIn(var_10, 35, 3, 0); 
-  __visc__bindIn(var_10, 36, 4, 0); 
-  __visc__bindIn(var_10, 37, 5, 0); 
-  __visc__bindIn(var_10, 38, 6, 0); 
-  __visc__bindIn(var_10, 39, 7, 0); 
-  __visc__bindIn(var_10, 40, 8, 0); 
-  __visc__bindIn(var_10, 41, 9, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 42, 2, 0); 
-  __visc__bindIn(var_12, 43, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-  __visc__bindIn(var_13, 44, 2, 0); 
-  __visc__bindIn(var_13, 45, 3, 0); 
-  __visc__bindIn(var_13, 46, 4, 0); 
-  __visc__bindIn(var_13, 47, 5, 0); 
-  __visc__bindIn(var_13, 48, 6, 0); 
-  __visc__bindIn(var_13, 49, 7, 0); 
-  __visc__bindIn(var_13, 50, 8, 0); 
-  __visc__bindIn(var_13, 51, 9, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 52, 2, 0); 
-  __visc__bindIn(var_15, 53, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-  __visc__bindIn(var_16, 54, 2, 0); 
-  __visc__bindIn(var_16, 55, 3, 0); 
-  __visc__bindIn(var_16, 56, 4, 0); 
-  __visc__bindIn(var_16, 57, 5, 0); 
-  __visc__bindIn(var_16, 58, 6, 0); 
-  __visc__bindIn(var_16, 59, 7, 0); 
-  __visc__bindIn(var_16, 60, 8, 0); 
-  __visc__bindIn(var_16, 61, 9, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 62, 2, 0); 
-  __visc__bindIn(var_18, 63, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-  __visc__bindIn(var_19, 64, 2, 0); 
-  __visc__bindIn(var_19, 65, 3, 0); 
-  __visc__bindIn(var_19, 66, 4, 0); 
-  __visc__bindIn(var_19, 67, 5, 0); 
-  __visc__bindIn(var_19, 68, 6, 0); 
-  __visc__bindIn(var_19, 69, 7, 0); 
-  __visc__bindIn(var_19, 70, 8, 0); 
-  __visc__bindIn(var_19, 71, 9, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 72, 2, 0); 
-  __visc__bindIn(var_21, 73, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-  __visc__bindIn(var_22, 74, 2, 0); 
-  __visc__bindIn(var_22, 75, 3, 0); 
-  __visc__bindIn(var_22, 76, 4, 0); 
-  __visc__bindIn(var_22, 77, 5, 0); 
-  __visc__bindIn(var_22, 78, 6, 0); 
-  __visc__bindIn(var_22, 79, 7, 0); 
-  __visc__bindIn(var_22, 80, 8, 0); 
-  __visc__bindIn(var_22, 81, 9, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 82, 2, 0); 
-  __visc__bindIn(var_24, 83, 3, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-  __visc__bindIn(var_25, 84, 2, 0); 
-  __visc__bindIn(var_25, 85, 3, 0); 
-  __visc__bindIn(var_25, 86, 4, 0); 
-  __visc__bindIn(var_25, 87, 5, 0); 
-  __visc__bindIn(var_25, 88, 6, 0); 
-  __visc__bindIn(var_25, 89, 7, 0); 
-  __visc__bindIn(var_25, 90, 8, 0); 
-  __visc__bindIn(var_25, 91, 9, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 92, 2, 0); 
-  __visc__bindIn(var_27, 93, 3, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-  __visc__bindIn(var_28, 94, 2, 0); 
-  __visc__bindIn(var_28, 95, 3, 0); 
-  __visc__bindIn(var_28, 96, 4, 0); 
-  __visc__bindIn(var_28, 97, 5, 0); 
-  __visc__bindIn(var_28, 98, 6, 0); 
-  __visc__bindIn(var_28, 99, 7, 0); 
-  __visc__bindIn(var_28, 100, 8, 0); 
-  __visc__bindIn(var_28, 101, 9, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 102, 2, 0); 
-  __visc__bindIn(var_30, 103, 3, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 104, 2, 0); 
-  __visc__bindIn(var_31, 105, 3, 0); 
-  __visc__bindIn(var_31, 106, 4, 0); 
-  __visc__bindIn(var_31, 107, 5, 0); 
-  __visc__bindIn(var_31, 108, 6, 0); 
-  __visc__bindIn(var_31, 109, 7, 0); 
-  __visc__bindIn(var_31, 110, 8, 0); 
-  __visc__bindIn(var_31, 111, 9, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-  __visc__bindIn(var_33, 112, 2, 0); 
-  __visc__bindIn(var_33, 113, 3, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 114, 2, 0); 
-  __visc__bindIn(var_34, 115, 3, 0); 
-  __visc__bindIn(var_34, 116, 4, 0); 
-  __visc__bindIn(var_34, 117, 5, 0); 
-  __visc__bindIn(var_34, 118, 6, 0); 
-  __visc__bindIn(var_34, 119, 7, 0); 
-  __visc__bindIn(var_34, 120, 8, 0); 
-  __visc__bindIn(var_34, 121, 9, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-  __visc__bindIn(var_36, 122, 2, 0); 
-  __visc__bindIn(var_36, 123, 3, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 124, 2, 0); 
-  __visc__bindIn(var_37, 125, 3, 0); 
-  __visc__bindIn(var_37, 126, 4, 0); 
-  __visc__bindIn(var_37, 127, 5, 0); 
-  __visc__bindIn(var_37, 128, 6, 0); 
-  __visc__bindIn(var_37, 129, 7, 0); 
-  __visc__bindIn(var_37, 130, 8, 0); 
-  __visc__bindIn(var_37, 131, 9, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_38, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 1, 0); 
-  __visc__bindIn(var_39, 132, 2, 0); 
-  __visc__bindIn(var_39, 133, 3, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 134, 2, 0); 
-  __visc__bindIn(var_40, 135, 3, 0); 
-  __visc__bindIn(var_40, 136, 4, 0); 
-  __visc__bindIn(var_40, 137, 5, 0); 
-  __visc__bindIn(var_40, 138, 6, 0); 
-  __visc__bindIn(var_40, 139, 7, 0); 
-  __visc__bindIn(var_40, 140, 8, 0); 
-  __visc__bindIn(var_40, 141, 9, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-  __visc__bindIn(var_42, 142, 2, 0); 
-  __visc__bindIn(var_42, 143, 3, 0); 
-
-  void* var_43 = __visc__createNodeND(0, var_43_node); 
-
-  __visc__edge(var_42, var_43, 1, 0, 0, 0); 
-  __visc__edge(var_42, var_43, 1, 1, 1, 0); 
-  __visc__bindIn(var_43, 144, 2, 0); 
-  __visc__bindIn(var_43, 145, 3, 0); 
-  __visc__bindIn(var_43, 146, 4, 0); 
-  __visc__bindIn(var_43, 147, 5, 0); 
-  __visc__bindIn(var_43, 148, 6, 0); 
-  __visc__bindIn(var_43, 149, 7, 0); 
-  __visc__bindIn(var_43, 150, 8, 0); 
-  __visc__bindIn(var_43, 151, 9, 0); 
-
-  void* var_44 = __visc__createNodeND(0, var_44_node); 
-
-  __visc__edge(var_43, var_44, 1, 0, 0, 0); 
-  __visc__edge(var_43, var_44, 1, 1, 1, 0); 
-
-  void* var_45 = __visc__createNodeND(0, var_45_node); 
-
-  __visc__edge(var_44, var_45, 1, 0, 0, 0); 
-  __visc__edge(var_44, var_45, 1, 1, 1, 0); 
-  __visc__bindIn(var_45, 152, 2, 0); 
-  __visc__bindIn(var_45, 153, 3, 0); 
-
-  void* var_46 = __visc__createNodeND(0, var_46_node); 
-
-  __visc__edge(var_45, var_46, 1, 0, 0, 0); 
-  __visc__edge(var_45, var_46, 1, 1, 1, 0); 
-  __visc__bindIn(var_46, 154, 2, 0); 
-  __visc__bindIn(var_46, 155, 3, 0); 
-  __visc__bindIn(var_46, 156, 4, 0); 
-  __visc__bindIn(var_46, 157, 5, 0); 
-  __visc__bindIn(var_46, 158, 6, 0); 
-  __visc__bindIn(var_46, 159, 7, 0); 
-  __visc__bindIn(var_46, 160, 8, 0); 
-  __visc__bindIn(var_46, 161, 9, 0); 
-
-  void* var_47 = __visc__createNodeND(0, var_47_node); 
-
-  __visc__edge(var_46, var_47, 1, 0, 0, 0); 
-  __visc__edge(var_46, var_47, 1, 1, 1, 0); 
-
-  void* var_48 = __visc__createNodeND(0, var_48_node); 
-
-  __visc__edge(var_47, var_48, 1, 0, 0, 0); 
-  __visc__edge(var_47, var_48, 1, 1, 1, 0); 
-  __visc__bindIn(var_48, 162, 2, 0); 
-  __visc__bindIn(var_48, 163, 3, 0); 
-
-  void* var_49 = __visc__createNodeND(0, var_49_node); 
-
-  __visc__edge(var_48, var_49, 1, 0, 0, 0); 
-  __visc__edge(var_48, var_49, 1, 1, 1, 0); 
-  __visc__bindIn(var_49, 164, 2, 0); 
-  __visc__bindIn(var_49, 165, 3, 0); 
-  __visc__bindIn(var_49, 166, 4, 0); 
-  __visc__bindIn(var_49, 167, 5, 0); 
-  __visc__bindIn(var_49, 168, 6, 0); 
-  __visc__bindIn(var_49, 169, 7, 0); 
-  __visc__bindIn(var_49, 170, 8, 0); 
-  __visc__bindIn(var_49, 171, 9, 0); 
-
-  void* var_50 = __visc__createNodeND(0, var_50_node); 
-
-  __visc__edge(var_49, var_50, 1, 0, 0, 0); 
-  __visc__edge(var_49, var_50, 1, 1, 1, 0); 
-
-  void* var_51 = __visc__createNodeND(0, var_51_node); 
-
-  __visc__edge(var_50, var_51, 1, 0, 0, 0); 
-  __visc__edge(var_50, var_51, 1, 1, 1, 0); 
-  __visc__bindIn(var_51, 172, 2, 0); 
-  __visc__bindIn(var_51, 173, 3, 0); 
-
-  void* var_52 = __visc__createNodeND(0, var_52_node); 
-
-  __visc__edge(var_51, var_52, 1, 0, 0, 0); 
-  __visc__edge(var_51, var_52, 1, 1, 1, 0); 
-  __visc__bindIn(var_52, 174, 2, 0); 
-  __visc__bindIn(var_52, 175, 3, 0); 
-  __visc__bindIn(var_52, 176, 4, 0); 
-  __visc__bindIn(var_52, 177, 5, 0); 
-  __visc__bindIn(var_52, 178, 6, 0); 
-  __visc__bindIn(var_52, 179, 7, 0); 
-  __visc__bindIn(var_52, 180, 8, 0); 
-  __visc__bindIn(var_52, 181, 9, 0); 
-
-  void* var_53 = __visc__createNodeND(0, var_53_node); 
-
-  __visc__edge(var_52, var_53, 1, 0, 0, 0); 
-  __visc__edge(var_52, var_53, 1, 1, 1, 0); 
-
-  void* var_54 = __visc__createNodeND(0, var_54_node); 
-
-  __visc__edge(var_53, var_54, 1, 0, 0, 0); 
-  __visc__edge(var_53, var_54, 1, 1, 1, 0); 
-  __visc__bindIn(var_54, 182, 2, 0); 
-  __visc__bindIn(var_54, 183, 3, 0); 
-
-  void* var_55 = __visc__createNodeND(0, var_55_node); 
-
-  __visc__edge(var_54, var_55, 1, 0, 0, 0); 
-  __visc__edge(var_54, var_55, 1, 1, 1, 0); 
-  __visc__bindIn(var_55, 184, 2, 0); 
-  __visc__bindIn(var_55, 185, 3, 0); 
-  __visc__bindIn(var_55, 186, 4, 0); 
-  __visc__bindIn(var_55, 187, 5, 0); 
-  __visc__bindIn(var_55, 188, 6, 0); 
-  __visc__bindIn(var_55, 189, 7, 0); 
-  __visc__bindIn(var_55, 190, 8, 0); 
-  __visc__bindIn(var_55, 191, 9, 0); 
-
-  void* var_56 = __visc__createNodeND(0, var_56_node); 
-
-  __visc__edge(var_55, var_56, 1, 0, 0, 0); 
-  __visc__edge(var_55, var_56, 1, 1, 1, 0); 
-
-  void* var_57 = __visc__createNodeND(0, var_57_node); 
-
-  __visc__edge(var_56, var_57, 1, 0, 0, 0); 
-  __visc__edge(var_56, var_57, 1, 1, 1, 0); 
-  __visc__bindIn(var_57, 192, 2, 0); 
-  __visc__bindIn(var_57, 193, 3, 0); 
-
-  void* var_58 = __visc__createNodeND(0, var_58_node); 
-
-  __visc__edge(var_57, var_58, 1, 0, 0, 0); 
-  __visc__edge(var_57, var_58, 1, 1, 1, 0); 
-  __visc__bindIn(var_58, 194, 2, 0); 
-  __visc__bindIn(var_58, 195, 3, 0); 
-  __visc__bindIn(var_58, 196, 4, 0); 
-  __visc__bindIn(var_58, 197, 5, 0); 
-  __visc__bindIn(var_58, 198, 6, 0); 
-  __visc__bindIn(var_58, 199, 7, 0); 
-  __visc__bindIn(var_58, 200, 8, 0); 
-  __visc__bindIn(var_58, 201, 9, 0); 
-
-  void* var_59 = __visc__createNodeND(0, var_59_node); 
-
-  __visc__edge(var_58, var_59, 1, 0, 0, 0); 
-  __visc__edge(var_58, var_59, 1, 1, 1, 0); 
-
-  void* var_60 = __visc__createNodeND(0, var_60_node); 
-
-  __visc__edge(var_59, var_60, 1, 0, 0, 0); 
-  __visc__edge(var_59, var_60, 1, 1, 1, 0); 
-  __visc__bindIn(var_60, 202, 2, 0); 
-  __visc__bindIn(var_60, 203, 3, 0); 
-
-  void* var_61 = __visc__createNodeND(0, var_61_node); 
-
-  __visc__edge(var_60, var_61, 1, 0, 0, 0); 
-  __visc__edge(var_60, var_61, 1, 1, 1, 0); 
-  __visc__bindIn(var_61, 204, 2, 0); 
-  __visc__bindIn(var_61, 205, 3, 0); 
-  __visc__bindIn(var_61, 206, 4, 0); 
-  __visc__bindIn(var_61, 207, 5, 0); 
-  __visc__bindIn(var_61, 208, 6, 0); 
-  __visc__bindIn(var_61, 209, 7, 0); 
-  __visc__bindIn(var_61, 210, 8, 0); 
-  __visc__bindIn(var_61, 211, 9, 0); 
-
-  void* var_62 = __visc__createNodeND(0, var_62_node); 
-
-  __visc__edge(var_61, var_62, 1, 0, 0, 0); 
-  __visc__edge(var_61, var_62, 1, 1, 1, 0); 
-
-  void* var_63 = __visc__createNodeND(0, var_63_node); 
-
-  __visc__edge(var_62, var_63, 1, 0, 0, 0); 
-  __visc__edge(var_62, var_63, 1, 1, 1, 0); 
-  __visc__bindIn(var_63, 212, 2, 0); 
-  __visc__bindIn(var_63, 213, 3, 0); 
-
-  void* var_64 = __visc__createNodeND(0, var_64_node); 
-
-  __visc__edge(var_63, var_64, 1, 0, 0, 0); 
-  __visc__edge(var_63, var_64, 1, 1, 1, 0); 
-  __visc__bindIn(var_64, 214, 2, 0); 
-  __visc__bindIn(var_64, 215, 3, 0); 
-  __visc__bindIn(var_64, 216, 4, 0); 
-  __visc__bindIn(var_64, 217, 5, 0); 
-  __visc__bindIn(var_64, 218, 6, 0); 
-  __visc__bindIn(var_64, 219, 7, 0); 
-  __visc__bindIn(var_64, 220, 8, 0); 
-  __visc__bindIn(var_64, 221, 9, 0); 
-
-  void* var_65 = __visc__createNodeND(0, var_65_node); 
-
-  __visc__edge(var_64, var_65, 1, 0, 0, 0); 
-  __visc__edge(var_64, var_65, 1, 1, 1, 0); 
-
-  void* var_66 = __visc__createNodeND(0, var_66_node); 
-
-  __visc__edge(var_65, var_66, 1, 0, 0, 0); 
-  __visc__edge(var_65, var_66, 1, 1, 1, 0); 
-  __visc__bindIn(var_66, 222, 2, 0); 
-  __visc__bindIn(var_66, 223, 3, 0); 
-
-  void* var_67 = __visc__createNodeND(0, var_67_node); 
-
-  __visc__edge(var_66, var_67, 1, 0, 0, 0); 
-  __visc__edge(var_66, var_67, 1, 1, 1, 0); 
-  __visc__bindIn(var_67, 224, 2, 0); 
-  __visc__bindIn(var_67, 225, 3, 0); 
-  __visc__bindIn(var_67, 226, 4, 0); 
-  __visc__bindIn(var_67, 227, 5, 0); 
-  __visc__bindIn(var_67, 228, 6, 0); 
-  __visc__bindIn(var_67, 229, 7, 0); 
-  __visc__bindIn(var_67, 230, 8, 0); 
-  __visc__bindIn(var_67, 231, 9, 0); 
-
-  void* var_68 = __visc__createNodeND(0, var_68_node); 
-
-  __visc__edge(var_67, var_68, 1, 0, 0, 0); 
-  __visc__edge(var_67, var_68, 1, 1, 1, 0); 
-
-  void* var_69 = __visc__createNodeND(0, var_69_node); 
-
-  __visc__edge(var_68, var_69, 1, 0, 0, 0); 
-  __visc__edge(var_68, var_69, 1, 1, 1, 0); 
-  __visc__bindIn(var_69, 232, 2, 0); 
-  __visc__bindIn(var_69, 233, 3, 0); 
-
-  void* var_70 = __visc__createNodeND(0, var_70_node); 
-
-  __visc__edge(var_69, var_70, 1, 0, 0, 0); 
-  __visc__edge(var_69, var_70, 1, 1, 1, 0); 
-  __visc__bindIn(var_70, 234, 2, 0); 
-  __visc__bindIn(var_70, 235, 3, 0); 
-  __visc__bindIn(var_70, 236, 4, 0); 
-  __visc__bindIn(var_70, 237, 5, 0); 
-  __visc__bindIn(var_70, 238, 6, 0); 
-  __visc__bindIn(var_70, 239, 7, 0); 
-  __visc__bindIn(var_70, 240, 8, 0); 
-  __visc__bindIn(var_70, 241, 9, 0); 
-
-  void* var_71 = __visc__createNodeND(0, var_71_node); 
-
-  __visc__edge(var_70, var_71, 1, 0, 0, 0); 
-  __visc__edge(var_70, var_71, 1, 1, 1, 0); 
-
-  void* var_72 = __visc__createNodeND(0, var_72_node); 
-
-  __visc__edge(var_71, var_72, 1, 0, 0, 0); 
-  __visc__edge(var_71, var_72, 1, 1, 1, 0); 
-  __visc__bindIn(var_72, 242, 2, 0); 
-  __visc__bindIn(var_72, 243, 3, 0); 
-
-  void* var_73 = __visc__createNodeND(0, var_73_node); 
-
-  __visc__edge(var_72, var_73, 1, 0, 0, 0); 
-  __visc__edge(var_72, var_73, 1, 1, 1, 0); 
-  __visc__bindIn(var_73, 244, 2, 0); 
-  __visc__bindIn(var_73, 245, 3, 0); 
-  __visc__bindIn(var_73, 246, 4, 0); 
-  __visc__bindIn(var_73, 247, 5, 0); 
-  __visc__bindIn(var_73, 248, 6, 0); 
-  __visc__bindIn(var_73, 249, 7, 0); 
-  __visc__bindIn(var_73, 250, 8, 0); 
-  __visc__bindIn(var_73, 251, 9, 0); 
-
-  void* var_74 = __visc__createNodeND(0, var_74_node); 
-
-  __visc__edge(var_73, var_74, 1, 0, 0, 0); 
-  __visc__edge(var_73, var_74, 1, 1, 1, 0); 
-
-  void* var_75 = __visc__createNodeND(0, var_75_node); 
-
-  __visc__edge(var_74, var_75, 1, 0, 0, 0); 
-  __visc__edge(var_74, var_75, 1, 1, 1, 0); 
-  __visc__bindIn(var_75, 252, 2, 0); 
-  __visc__bindIn(var_75, 253, 3, 0); 
-
-  void* var_76 = __visc__createNodeND(0, var_76_node); 
-
-  __visc__edge(var_75, var_76, 1, 0, 0, 0); 
-  __visc__edge(var_75, var_76, 1, 1, 1, 0); 
-  __visc__bindIn(var_76, 254, 2, 0); 
-  __visc__bindIn(var_76, 255, 3, 0); 
-  __visc__bindIn(var_76, 256, 4, 0); 
-  __visc__bindIn(var_76, 257, 5, 0); 
-  __visc__bindIn(var_76, 258, 6, 0); 
-  __visc__bindIn(var_76, 259, 7, 0); 
-  __visc__bindIn(var_76, 260, 8, 0); 
-  __visc__bindIn(var_76, 261, 9, 0); 
-
-  void* var_77 = __visc__createNodeND(0, var_77_node); 
-
-  __visc__edge(var_76, var_77, 1, 0, 0, 0); 
-  __visc__edge(var_76, var_77, 1, 1, 1, 0); 
-
-  void* var_78 = __visc__createNodeND(0, var_78_node); 
-
-  __visc__edge(var_77, var_78, 1, 0, 0, 0); 
-  __visc__edge(var_77, var_78, 1, 1, 1, 0); 
-  __visc__bindIn(var_78, 262, 2, 0); 
-  __visc__bindIn(var_78, 263, 3, 0); 
-
-  void* var_79 = __visc__createNodeND(0, var_79_node); 
-
-  __visc__edge(var_78, var_79, 1, 0, 0, 0); 
-  __visc__edge(var_78, var_79, 1, 1, 1, 0); 
-  __visc__bindIn(var_79, 264, 2, 0); 
-  __visc__bindIn(var_79, 265, 3, 0); 
-  __visc__bindIn(var_79, 266, 4, 0); 
-  __visc__bindIn(var_79, 267, 5, 0); 
-  __visc__bindIn(var_79, 268, 6, 0); 
-  __visc__bindIn(var_79, 269, 7, 0); 
-  __visc__bindIn(var_79, 270, 8, 0); 
-  __visc__bindIn(var_79, 271, 9, 0); 
-
-  void* var_80 = __visc__createNodeND(0, var_80_node); 
-
-  __visc__edge(var_79, var_80, 1, 0, 0, 0); 
-  __visc__edge(var_79, var_80, 1, 1, 1, 0); 
-
-  void* var_81 = __visc__createNodeND(0, var_81_node); 
-
-  __visc__edge(var_80, var_81, 1, 0, 0, 0); 
-  __visc__edge(var_80, var_81, 1, 1, 1, 0); 
-
-  void* var_82 = __visc__createNodeND(0, var_82_node); 
-
-  __visc__edge(var_81, var_82, 1, 0, 0, 0); 
-  __visc__edge(var_81, var_82, 1, 1, 1, 0); 
-  __visc__bindIn(var_82, 272, 2, 0); 
-  __visc__bindIn(var_82, 273, 3, 0); 
-
-  void* var_83 = __visc__createNodeND(0, var_83_node); 
-
-  __visc__edge(var_82, var_83, 1, 0, 0, 0); 
-  __visc__edge(var_82, var_83, 1, 1, 1, 0); 
-  __visc__bindIn(var_83, 274, 2, 0); 
-  __visc__bindIn(var_83, 275, 3, 0); 
-
-  void* var_84 = __visc__createNodeND(0, var_84_node); 
-
-  __visc__edge(var_83, var_84, 1, 0, 0, 0); 
-  __visc__edge(var_83, var_84, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_84, 0, 0, 0); 
-  __visc__bindOut(var_84, 1, 1, 0); 
-
-}
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* batch_normalization_1_gamma; 
-  size_t batch_normalization_1_gamma_bytes; 
-  void* batch_normalization_1_beta; 
-  size_t batch_normalization_1_beta_bytes; 
-  void* batch_normalization_1_mean; 
-  size_t batch_normalization_1_mean_bytes; 
-  void* batch_normalization_1_variance; 
-  size_t batch_normalization_1_variance_bytes; 
-  void* depthwise_conv2d_1_w; 
-  size_t depthwise_conv2d_1_w_bytes; 
-  void* batch_normalization_2_gamma; 
-  size_t batch_normalization_2_gamma_bytes; 
-  void* batch_normalization_2_beta; 
-  size_t batch_normalization_2_beta_bytes; 
-  void* batch_normalization_2_mean; 
-  size_t batch_normalization_2_mean_bytes; 
-  void* batch_normalization_2_variance; 
-  size_t batch_normalization_2_variance_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* batch_normalization_3_gamma; 
-  size_t batch_normalization_3_gamma_bytes; 
-  void* batch_normalization_3_beta; 
-  size_t batch_normalization_3_beta_bytes; 
-  void* batch_normalization_3_mean; 
-  size_t batch_normalization_3_mean_bytes; 
-  void* batch_normalization_3_variance; 
-  size_t batch_normalization_3_variance_bytes; 
-  void* depthwise_conv2d_2_w; 
-  size_t depthwise_conv2d_2_w_bytes; 
-  void* batch_normalization_4_gamma; 
-  size_t batch_normalization_4_gamma_bytes; 
-  void* batch_normalization_4_beta; 
-  size_t batch_normalization_4_beta_bytes; 
-  void* batch_normalization_4_mean; 
-  size_t batch_normalization_4_mean_bytes; 
-  void* batch_normalization_4_variance; 
-  size_t batch_normalization_4_variance_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* batch_normalization_5_gamma; 
-  size_t batch_normalization_5_gamma_bytes; 
-  void* batch_normalization_5_beta; 
-  size_t batch_normalization_5_beta_bytes; 
-  void* batch_normalization_5_mean; 
-  size_t batch_normalization_5_mean_bytes; 
-  void* batch_normalization_5_variance; 
-  size_t batch_normalization_5_variance_bytes; 
-  void* depthwise_conv2d_3_w; 
-  size_t depthwise_conv2d_3_w_bytes; 
-  void* batch_normalization_6_gamma; 
-  size_t batch_normalization_6_gamma_bytes; 
-  void* batch_normalization_6_beta; 
-  size_t batch_normalization_6_beta_bytes; 
-  void* batch_normalization_6_mean; 
-  size_t batch_normalization_6_mean_bytes; 
-  void* batch_normalization_6_variance; 
-  size_t batch_normalization_6_variance_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* batch_normalization_7_gamma; 
-  size_t batch_normalization_7_gamma_bytes; 
-  void* batch_normalization_7_beta; 
-  size_t batch_normalization_7_beta_bytes; 
-  void* batch_normalization_7_mean; 
-  size_t batch_normalization_7_mean_bytes; 
-  void* batch_normalization_7_variance; 
-  size_t batch_normalization_7_variance_bytes; 
-  void* depthwise_conv2d_4_w; 
-  size_t depthwise_conv2d_4_w_bytes; 
-  void* batch_normalization_8_gamma; 
-  size_t batch_normalization_8_gamma_bytes; 
-  void* batch_normalization_8_beta; 
-  size_t batch_normalization_8_beta_bytes; 
-  void* batch_normalization_8_mean; 
-  size_t batch_normalization_8_mean_bytes; 
-  void* batch_normalization_8_variance; 
-  size_t batch_normalization_8_variance_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* batch_normalization_9_gamma; 
-  size_t batch_normalization_9_gamma_bytes; 
-  void* batch_normalization_9_beta; 
-  size_t batch_normalization_9_beta_bytes; 
-  void* batch_normalization_9_mean; 
-  size_t batch_normalization_9_mean_bytes; 
-  void* batch_normalization_9_variance; 
-  size_t batch_normalization_9_variance_bytes; 
-  void* depthwise_conv2d_5_w; 
-  size_t depthwise_conv2d_5_w_bytes; 
-  void* batch_normalization_10_gamma; 
-  size_t batch_normalization_10_gamma_bytes; 
-  void* batch_normalization_10_beta; 
-  size_t batch_normalization_10_beta_bytes; 
-  void* batch_normalization_10_mean; 
-  size_t batch_normalization_10_mean_bytes; 
-  void* batch_normalization_10_variance; 
-  size_t batch_normalization_10_variance_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* batch_normalization_11_gamma; 
-  size_t batch_normalization_11_gamma_bytes; 
-  void* batch_normalization_11_beta; 
-  size_t batch_normalization_11_beta_bytes; 
-  void* batch_normalization_11_mean; 
-  size_t batch_normalization_11_mean_bytes; 
-  void* batch_normalization_11_variance; 
-  size_t batch_normalization_11_variance_bytes; 
-  void* depthwise_conv2d_6_w; 
-  size_t depthwise_conv2d_6_w_bytes; 
-  void* batch_normalization_12_gamma; 
-  size_t batch_normalization_12_gamma_bytes; 
-  void* batch_normalization_12_beta; 
-  size_t batch_normalization_12_beta_bytes; 
-  void* batch_normalization_12_mean; 
-  size_t batch_normalization_12_mean_bytes; 
-  void* batch_normalization_12_variance; 
-  size_t batch_normalization_12_variance_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* batch_normalization_13_gamma; 
-  size_t batch_normalization_13_gamma_bytes; 
-  void* batch_normalization_13_beta; 
-  size_t batch_normalization_13_beta_bytes; 
-  void* batch_normalization_13_mean; 
-  size_t batch_normalization_13_mean_bytes; 
-  void* batch_normalization_13_variance; 
-  size_t batch_normalization_13_variance_bytes; 
-  void* depthwise_conv2d_7_w; 
-  size_t depthwise_conv2d_7_w_bytes; 
-  void* batch_normalization_14_gamma; 
-  size_t batch_normalization_14_gamma_bytes; 
-  void* batch_normalization_14_beta; 
-  size_t batch_normalization_14_beta_bytes; 
-  void* batch_normalization_14_mean; 
-  size_t batch_normalization_14_mean_bytes; 
-  void* batch_normalization_14_variance; 
-  size_t batch_normalization_14_variance_bytes; 
-  void* conv2d_8_w; 
-  size_t conv2d_8_w_bytes; 
-  void* batch_normalization_15_gamma; 
-  size_t batch_normalization_15_gamma_bytes; 
-  void* batch_normalization_15_beta; 
-  size_t batch_normalization_15_beta_bytes; 
-  void* batch_normalization_15_mean; 
-  size_t batch_normalization_15_mean_bytes; 
-  void* batch_normalization_15_variance; 
-  size_t batch_normalization_15_variance_bytes; 
-  void* depthwise_conv2d_8_w; 
-  size_t depthwise_conv2d_8_w_bytes; 
-  void* batch_normalization_16_gamma; 
-  size_t batch_normalization_16_gamma_bytes; 
-  void* batch_normalization_16_beta; 
-  size_t batch_normalization_16_beta_bytes; 
-  void* batch_normalization_16_mean; 
-  size_t batch_normalization_16_mean_bytes; 
-  void* batch_normalization_16_variance; 
-  size_t batch_normalization_16_variance_bytes; 
-  void* conv2d_9_w; 
-  size_t conv2d_9_w_bytes; 
-  void* batch_normalization_17_gamma; 
-  size_t batch_normalization_17_gamma_bytes; 
-  void* batch_normalization_17_beta; 
-  size_t batch_normalization_17_beta_bytes; 
-  void* batch_normalization_17_mean; 
-  size_t batch_normalization_17_mean_bytes; 
-  void* batch_normalization_17_variance; 
-  size_t batch_normalization_17_variance_bytes; 
-  void* depthwise_conv2d_9_w; 
-  size_t depthwise_conv2d_9_w_bytes; 
-  void* batch_normalization_18_gamma; 
-  size_t batch_normalization_18_gamma_bytes; 
-  void* batch_normalization_18_beta; 
-  size_t batch_normalization_18_beta_bytes; 
-  void* batch_normalization_18_mean; 
-  size_t batch_normalization_18_mean_bytes; 
-  void* batch_normalization_18_variance; 
-  size_t batch_normalization_18_variance_bytes; 
-  void* conv2d_10_w; 
-  size_t conv2d_10_w_bytes; 
-  void* batch_normalization_19_gamma; 
-  size_t batch_normalization_19_gamma_bytes; 
-  void* batch_normalization_19_beta; 
-  size_t batch_normalization_19_beta_bytes; 
-  void* batch_normalization_19_mean; 
-  size_t batch_normalization_19_mean_bytes; 
-  void* batch_normalization_19_variance; 
-  size_t batch_normalization_19_variance_bytes; 
-  void* depthwise_conv2d_10_w; 
-  size_t depthwise_conv2d_10_w_bytes; 
-  void* batch_normalization_20_gamma; 
-  size_t batch_normalization_20_gamma_bytes; 
-  void* batch_normalization_20_beta; 
-  size_t batch_normalization_20_beta_bytes; 
-  void* batch_normalization_20_mean; 
-  size_t batch_normalization_20_mean_bytes; 
-  void* batch_normalization_20_variance; 
-  size_t batch_normalization_20_variance_bytes; 
-  void* conv2d_11_w; 
-  size_t conv2d_11_w_bytes; 
-  void* batch_normalization_21_gamma; 
-  size_t batch_normalization_21_gamma_bytes; 
-  void* batch_normalization_21_beta; 
-  size_t batch_normalization_21_beta_bytes; 
-  void* batch_normalization_21_mean; 
-  size_t batch_normalization_21_mean_bytes; 
-  void* batch_normalization_21_variance; 
-  size_t batch_normalization_21_variance_bytes; 
-  void* depthwise_conv2d_11_w; 
-  size_t depthwise_conv2d_11_w_bytes; 
-  void* batch_normalization_22_gamma; 
-  size_t batch_normalization_22_gamma_bytes; 
-  void* batch_normalization_22_beta; 
-  size_t batch_normalization_22_beta_bytes; 
-  void* batch_normalization_22_mean; 
-  size_t batch_normalization_22_mean_bytes; 
-  void* batch_normalization_22_variance; 
-  size_t batch_normalization_22_variance_bytes; 
-  void* conv2d_12_w; 
-  size_t conv2d_12_w_bytes; 
-  void* batch_normalization_23_gamma; 
-  size_t batch_normalization_23_gamma_bytes; 
-  void* batch_normalization_23_beta; 
-  size_t batch_normalization_23_beta_bytes; 
-  void* batch_normalization_23_mean; 
-  size_t batch_normalization_23_mean_bytes; 
-  void* batch_normalization_23_variance; 
-  size_t batch_normalization_23_variance_bytes; 
-  void* depthwise_conv2d_12_w; 
-  size_t depthwise_conv2d_12_w_bytes; 
-  void* batch_normalization_24_gamma; 
-  size_t batch_normalization_24_gamma_bytes; 
-  void* batch_normalization_24_beta; 
-  size_t batch_normalization_24_beta_bytes; 
-  void* batch_normalization_24_mean; 
-  size_t batch_normalization_24_mean_bytes; 
-  void* batch_normalization_24_variance; 
-  size_t batch_normalization_24_variance_bytes; 
-  void* conv2d_13_w; 
-  size_t conv2d_13_w_bytes; 
-  void* batch_normalization_25_gamma; 
-  size_t batch_normalization_25_gamma_bytes; 
-  void* batch_normalization_25_beta; 
-  size_t batch_normalization_25_beta_bytes; 
-  void* batch_normalization_25_mean; 
-  size_t batch_normalization_25_mean_bytes; 
-  void* batch_normalization_25_variance; 
-  size_t batch_normalization_25_variance_bytes; 
-  void* depthwise_conv2d_13_w; 
-  size_t depthwise_conv2d_13_w_bytes; 
-  void* batch_normalization_26_gamma; 
-  size_t batch_normalization_26_gamma_bytes; 
-  void* batch_normalization_26_beta; 
-  size_t batch_normalization_26_beta_bytes; 
-  void* batch_normalization_26_mean; 
-  size_t batch_normalization_26_mean_bytes; 
-  void* batch_normalization_26_variance; 
-  size_t batch_normalization_26_variance_bytes; 
-  void* conv2d_14_w; 
-  size_t conv2d_14_w_bytes; 
-  void* batch_normalization_27_gamma; 
-  size_t batch_normalization_27_gamma_bytes; 
-  void* batch_normalization_27_beta; 
-  size_t batch_normalization_27_beta_bytes; 
-  void* batch_normalization_27_mean; 
-  size_t batch_normalization_27_mean_bytes; 
-  void* batch_normalization_27_variance; 
-  size_t batch_normalization_27_variance_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-} RootIn;
-
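The RootIn struct being deleted above follows the host-argument convention of HPVM-generated binaries: every tensor comes as a (void* pointer, size_t byte-count) pair, and a ret_t slot at the end receives the graph's result. A minimal sketch of the same layout, trimmed to one weight tensor (the full struct simply repeats the pair for each parameter; struct ret_t is assumed to be declared earlier in the same file):

    #include <cstddef>

    // Sketch of the (pointer, bytes) pairing used by the deleted RootIn.
    typedef struct {
      void *input;             // input tensor
      size_t input_bytes;      // 0 here: the runtime tracks sizes itself
      void *conv2d_1_w;        // first convolution weights
      size_t conv2d_1_w_bytes;
      /* ... one pointer/bytes pair per remaining parameter ... */
      struct ret_t r;          // filled in by the dataflow graph
    } RootIn;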
-int main(){ 
-
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
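Every parameter above is loaded through the same two-line pattern: build the file path from dir_prefix, then call readTrainedWeights with a flag (always 0 in this file) and the tensor's four NCHW dimensions; convolution weights use their full shape (e.g. 32,3,3,3 for conv2d_1_w) while batch-normalization vectors are 1,C,1,1. A hypothetical wrapper that makes the pattern explicit; only loadParam is invented here, readTrainedWeights is the runtime call used above:

    #include <string>

    // Declaration matching the calls in the deleted file; the second
    // argument is passed as 0 throughout.
    extern void *readTrainedWeights(const char *path, int flag,
                                    int n, int c, int h, int w);

    // Hypothetical convenience wrapper (not part of the original file).
    static void *loadParam(const std::string &dir, const char *name,
                           int n, int c, int h, int w) {
      std::string path = dir + name;  // e.g. "data/mobilenet_quant/conv2d_1_w.bin"
      return readTrainedWeights(path.c_str(), 0, n, c, h, w);
    }

    // Equivalent to two of the deleted calls:
    //   void *conv2d_1_w = loadParam(dir_prefix, "conv2d_1_w.bin", 32, 3, 3, 3);
    //   void *bn_1_gamma = loadParam(dir_prefix, "batch_normalization_1_gamma.bin", 1, 32, 1, 1);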
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->batch_normalization_1_gamma = batch_normalization_1_gamma; 
-args->batch_normalization_1_gamma_bytes = 0; 
-args->batch_normalization_1_beta = batch_normalization_1_beta; 
-args->batch_normalization_1_beta_bytes = 0; 
-args->batch_normalization_1_mean = batch_normalization_1_mean; 
-args->batch_normalization_1_mean_bytes = 0; 
-args->batch_normalization_1_variance = batch_normalization_1_variance; 
-args->batch_normalization_1_variance_bytes = 0; 
-args->depthwise_conv2d_1_w = depthwise_conv2d_1_w; 
-args->depthwise_conv2d_1_w_bytes = 0; 
-args->batch_normalization_2_gamma = batch_normalization_2_gamma; 
-args->batch_normalization_2_gamma_bytes = 0; 
-args->batch_normalization_2_beta = batch_normalization_2_beta; 
-args->batch_normalization_2_beta_bytes = 0; 
-args->batch_normalization_2_mean = batch_normalization_2_mean; 
-args->batch_normalization_2_mean_bytes = 0; 
-args->batch_normalization_2_variance = batch_normalization_2_variance; 
-args->batch_normalization_2_variance_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->batch_normalization_3_gamma = batch_normalization_3_gamma; 
-args->batch_normalization_3_gamma_bytes = 0; 
-args->batch_normalization_3_beta = batch_normalization_3_beta; 
-args->batch_normalization_3_beta_bytes = 0; 
-args->batch_normalization_3_mean = batch_normalization_3_mean; 
-args->batch_normalization_3_mean_bytes = 0; 
-args->batch_normalization_3_variance = batch_normalization_3_variance; 
-args->batch_normalization_3_variance_bytes = 0; 
-args->depthwise_conv2d_2_w = depthwise_conv2d_2_w; 
-args->depthwise_conv2d_2_w_bytes = 0; 
-args->batch_normalization_4_gamma = batch_normalization_4_gamma; 
-args->batch_normalization_4_gamma_bytes = 0; 
-args->batch_normalization_4_beta = batch_normalization_4_beta; 
-args->batch_normalization_4_beta_bytes = 0; 
-args->batch_normalization_4_mean = batch_normalization_4_mean; 
-args->batch_normalization_4_mean_bytes = 0; 
-args->batch_normalization_4_variance = batch_normalization_4_variance; 
-args->batch_normalization_4_variance_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->batch_normalization_5_gamma = batch_normalization_5_gamma; 
-args->batch_normalization_5_gamma_bytes = 0; 
-args->batch_normalization_5_beta = batch_normalization_5_beta; 
-args->batch_normalization_5_beta_bytes = 0; 
-args->batch_normalization_5_mean = batch_normalization_5_mean; 
-args->batch_normalization_5_mean_bytes = 0; 
-args->batch_normalization_5_variance = batch_normalization_5_variance; 
-args->batch_normalization_5_variance_bytes = 0; 
-args->depthwise_conv2d_3_w = depthwise_conv2d_3_w; 
-args->depthwise_conv2d_3_w_bytes = 0; 
-args->batch_normalization_6_gamma = batch_normalization_6_gamma; 
-args->batch_normalization_6_gamma_bytes = 0; 
-args->batch_normalization_6_beta = batch_normalization_6_beta; 
-args->batch_normalization_6_beta_bytes = 0; 
-args->batch_normalization_6_mean = batch_normalization_6_mean; 
-args->batch_normalization_6_mean_bytes = 0; 
-args->batch_normalization_6_variance = batch_normalization_6_variance; 
-args->batch_normalization_6_variance_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->batch_normalization_7_gamma = batch_normalization_7_gamma; 
-args->batch_normalization_7_gamma_bytes = 0; 
-args->batch_normalization_7_beta = batch_normalization_7_beta; 
-args->batch_normalization_7_beta_bytes = 0; 
-args->batch_normalization_7_mean = batch_normalization_7_mean; 
-args->batch_normalization_7_mean_bytes = 0; 
-args->batch_normalization_7_variance = batch_normalization_7_variance; 
-args->batch_normalization_7_variance_bytes = 0; 
-args->depthwise_conv2d_4_w = depthwise_conv2d_4_w; 
-args->depthwise_conv2d_4_w_bytes = 0; 
-args->batch_normalization_8_gamma = batch_normalization_8_gamma; 
-args->batch_normalization_8_gamma_bytes = 0; 
-args->batch_normalization_8_beta = batch_normalization_8_beta; 
-args->batch_normalization_8_beta_bytes = 0; 
-args->batch_normalization_8_mean = batch_normalization_8_mean; 
-args->batch_normalization_8_mean_bytes = 0; 
-args->batch_normalization_8_variance = batch_normalization_8_variance; 
-args->batch_normalization_8_variance_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->batch_normalization_9_gamma = batch_normalization_9_gamma; 
-args->batch_normalization_9_gamma_bytes = 0; 
-args->batch_normalization_9_beta = batch_normalization_9_beta; 
-args->batch_normalization_9_beta_bytes = 0; 
-args->batch_normalization_9_mean = batch_normalization_9_mean; 
-args->batch_normalization_9_mean_bytes = 0; 
-args->batch_normalization_9_variance = batch_normalization_9_variance; 
-args->batch_normalization_9_variance_bytes = 0; 
-args->depthwise_conv2d_5_w = depthwise_conv2d_5_w; 
-args->depthwise_conv2d_5_w_bytes = 0; 
-args->batch_normalization_10_gamma = batch_normalization_10_gamma; 
-args->batch_normalization_10_gamma_bytes = 0; 
-args->batch_normalization_10_beta = batch_normalization_10_beta; 
-args->batch_normalization_10_beta_bytes = 0; 
-args->batch_normalization_10_mean = batch_normalization_10_mean; 
-args->batch_normalization_10_mean_bytes = 0; 
-args->batch_normalization_10_variance = batch_normalization_10_variance; 
-args->batch_normalization_10_variance_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->batch_normalization_11_gamma = batch_normalization_11_gamma; 
-args->batch_normalization_11_gamma_bytes = 0; 
-args->batch_normalization_11_beta = batch_normalization_11_beta; 
-args->batch_normalization_11_beta_bytes = 0; 
-args->batch_normalization_11_mean = batch_normalization_11_mean; 
-args->batch_normalization_11_mean_bytes = 0; 
-args->batch_normalization_11_variance = batch_normalization_11_variance; 
-args->batch_normalization_11_variance_bytes = 0; 
-args->depthwise_conv2d_6_w = depthwise_conv2d_6_w; 
-args->depthwise_conv2d_6_w_bytes = 0; 
-args->batch_normalization_12_gamma = batch_normalization_12_gamma; 
-args->batch_normalization_12_gamma_bytes = 0; 
-args->batch_normalization_12_beta = batch_normalization_12_beta; 
-args->batch_normalization_12_beta_bytes = 0; 
-args->batch_normalization_12_mean = batch_normalization_12_mean; 
-args->batch_normalization_12_mean_bytes = 0; 
-args->batch_normalization_12_variance = batch_normalization_12_variance; 
-args->batch_normalization_12_variance_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->batch_normalization_13_gamma = batch_normalization_13_gamma; 
-args->batch_normalization_13_gamma_bytes = 0; 
-args->batch_normalization_13_beta = batch_normalization_13_beta; 
-args->batch_normalization_13_beta_bytes = 0; 
-args->batch_normalization_13_mean = batch_normalization_13_mean; 
-args->batch_normalization_13_mean_bytes = 0; 
-args->batch_normalization_13_variance = batch_normalization_13_variance; 
-args->batch_normalization_13_variance_bytes = 0; 
-args->depthwise_conv2d_7_w = depthwise_conv2d_7_w; 
-args->depthwise_conv2d_7_w_bytes = 0; 
-args->batch_normalization_14_gamma = batch_normalization_14_gamma; 
-args->batch_normalization_14_gamma_bytes = 0; 
-args->batch_normalization_14_beta = batch_normalization_14_beta; 
-args->batch_normalization_14_beta_bytes = 0; 
-args->batch_normalization_14_mean = batch_normalization_14_mean; 
-args->batch_normalization_14_mean_bytes = 0; 
-args->batch_normalization_14_variance = batch_normalization_14_variance; 
-args->batch_normalization_14_variance_bytes = 0; 
-args->conv2d_8_w = conv2d_8_w; 
-args->conv2d_8_w_bytes = 0; 
-args->batch_normalization_15_gamma = batch_normalization_15_gamma; 
-args->batch_normalization_15_gamma_bytes = 0; 
-args->batch_normalization_15_beta = batch_normalization_15_beta; 
-args->batch_normalization_15_beta_bytes = 0; 
-args->batch_normalization_15_mean = batch_normalization_15_mean; 
-args->batch_normalization_15_mean_bytes = 0; 
-args->batch_normalization_15_variance = batch_normalization_15_variance; 
-args->batch_normalization_15_variance_bytes = 0; 
-args->depthwise_conv2d_8_w = depthwise_conv2d_8_w; 
-args->depthwise_conv2d_8_w_bytes = 0; 
-args->batch_normalization_16_gamma = batch_normalization_16_gamma; 
-args->batch_normalization_16_gamma_bytes = 0; 
-args->batch_normalization_16_beta = batch_normalization_16_beta; 
-args->batch_normalization_16_beta_bytes = 0; 
-args->batch_normalization_16_mean = batch_normalization_16_mean; 
-args->batch_normalization_16_mean_bytes = 0; 
-args->batch_normalization_16_variance = batch_normalization_16_variance; 
-args->batch_normalization_16_variance_bytes = 0; 
-args->conv2d_9_w = conv2d_9_w; 
-args->conv2d_9_w_bytes = 0; 
-args->batch_normalization_17_gamma = batch_normalization_17_gamma; 
-args->batch_normalization_17_gamma_bytes = 0; 
-args->batch_normalization_17_beta = batch_normalization_17_beta; 
-args->batch_normalization_17_beta_bytes = 0; 
-args->batch_normalization_17_mean = batch_normalization_17_mean; 
-args->batch_normalization_17_mean_bytes = 0; 
-args->batch_normalization_17_variance = batch_normalization_17_variance; 
-args->batch_normalization_17_variance_bytes = 0; 
-args->depthwise_conv2d_9_w = depthwise_conv2d_9_w; 
-args->depthwise_conv2d_9_w_bytes = 0; 
-args->batch_normalization_18_gamma = batch_normalization_18_gamma; 
-args->batch_normalization_18_gamma_bytes = 0; 
-args->batch_normalization_18_beta = batch_normalization_18_beta; 
-args->batch_normalization_18_beta_bytes = 0; 
-args->batch_normalization_18_mean = batch_normalization_18_mean; 
-args->batch_normalization_18_mean_bytes = 0; 
-args->batch_normalization_18_variance = batch_normalization_18_variance; 
-args->batch_normalization_18_variance_bytes = 0; 
-args->conv2d_10_w = conv2d_10_w; 
-args->conv2d_10_w_bytes = 0; 
-args->batch_normalization_19_gamma = batch_normalization_19_gamma; 
-args->batch_normalization_19_gamma_bytes = 0; 
-args->batch_normalization_19_beta = batch_normalization_19_beta; 
-args->batch_normalization_19_beta_bytes = 0; 
-args->batch_normalization_19_mean = batch_normalization_19_mean; 
-args->batch_normalization_19_mean_bytes = 0; 
-args->batch_normalization_19_variance = batch_normalization_19_variance; 
-args->batch_normalization_19_variance_bytes = 0; 
-args->depthwise_conv2d_10_w = depthwise_conv2d_10_w; 
-args->depthwise_conv2d_10_w_bytes = 0; 
-args->batch_normalization_20_gamma = batch_normalization_20_gamma; 
-args->batch_normalization_20_gamma_bytes = 0; 
-args->batch_normalization_20_beta = batch_normalization_20_beta; 
-args->batch_normalization_20_beta_bytes = 0; 
-args->batch_normalization_20_mean = batch_normalization_20_mean; 
-args->batch_normalization_20_mean_bytes = 0; 
-args->batch_normalization_20_variance = batch_normalization_20_variance; 
-args->batch_normalization_20_variance_bytes = 0; 
-args->conv2d_11_w = conv2d_11_w; 
-args->conv2d_11_w_bytes = 0; 
-args->batch_normalization_21_gamma = batch_normalization_21_gamma; 
-args->batch_normalization_21_gamma_bytes = 0; 
-args->batch_normalization_21_beta = batch_normalization_21_beta; 
-args->batch_normalization_21_beta_bytes = 0; 
-args->batch_normalization_21_mean = batch_normalization_21_mean; 
-args->batch_normalization_21_mean_bytes = 0; 
-args->batch_normalization_21_variance = batch_normalization_21_variance; 
-args->batch_normalization_21_variance_bytes = 0; 
-args->depthwise_conv2d_11_w = depthwise_conv2d_11_w; 
-args->depthwise_conv2d_11_w_bytes = 0; 
-args->batch_normalization_22_gamma = batch_normalization_22_gamma; 
-args->batch_normalization_22_gamma_bytes = 0; 
-args->batch_normalization_22_beta = batch_normalization_22_beta; 
-args->batch_normalization_22_beta_bytes = 0; 
-args->batch_normalization_22_mean = batch_normalization_22_mean; 
-args->batch_normalization_22_mean_bytes = 0; 
-args->batch_normalization_22_variance = batch_normalization_22_variance; 
-args->batch_normalization_22_variance_bytes = 0; 
-args->conv2d_12_w = conv2d_12_w; 
-args->conv2d_12_w_bytes = 0; 
-args->batch_normalization_23_gamma = batch_normalization_23_gamma; 
-args->batch_normalization_23_gamma_bytes = 0; 
-args->batch_normalization_23_beta = batch_normalization_23_beta; 
-args->batch_normalization_23_beta_bytes = 0; 
-args->batch_normalization_23_mean = batch_normalization_23_mean; 
-args->batch_normalization_23_mean_bytes = 0; 
-args->batch_normalization_23_variance = batch_normalization_23_variance; 
-args->batch_normalization_23_variance_bytes = 0; 
-args->depthwise_conv2d_12_w = depthwise_conv2d_12_w; 
-args->depthwise_conv2d_12_w_bytes = 0; 
-args->batch_normalization_24_gamma = batch_normalization_24_gamma; 
-args->batch_normalization_24_gamma_bytes = 0; 
-args->batch_normalization_24_beta = batch_normalization_24_beta; 
-args->batch_normalization_24_beta_bytes = 0; 
-args->batch_normalization_24_mean = batch_normalization_24_mean; 
-args->batch_normalization_24_mean_bytes = 0; 
-args->batch_normalization_24_variance = batch_normalization_24_variance; 
-args->batch_normalization_24_variance_bytes = 0; 
-args->conv2d_13_w = conv2d_13_w; 
-args->conv2d_13_w_bytes = 0; 
-args->batch_normalization_25_gamma = batch_normalization_25_gamma; 
-args->batch_normalization_25_gamma_bytes = 0; 
-args->batch_normalization_25_beta = batch_normalization_25_beta; 
-args->batch_normalization_25_beta_bytes = 0; 
-args->batch_normalization_25_mean = batch_normalization_25_mean; 
-args->batch_normalization_25_mean_bytes = 0; 
-args->batch_normalization_25_variance = batch_normalization_25_variance; 
-args->batch_normalization_25_variance_bytes = 0; 
-args->depthwise_conv2d_13_w = depthwise_conv2d_13_w; 
-args->depthwise_conv2d_13_w_bytes = 0; 
-args->batch_normalization_26_gamma = batch_normalization_26_gamma; 
-args->batch_normalization_26_gamma_bytes = 0; 
-args->batch_normalization_26_beta = batch_normalization_26_beta; 
-args->batch_normalization_26_beta_bytes = 0; 
-args->batch_normalization_26_mean = batch_normalization_26_mean; 
-args->batch_normalization_26_mean_bytes = 0; 
-args->batch_normalization_26_variance = batch_normalization_26_variance; 
-args->batch_normalization_26_variance_bytes = 0; 
-args->conv2d_14_w = conv2d_14_w; 
-args->conv2d_14_w_bytes = 0; 
-args->batch_normalization_27_gamma = batch_normalization_27_gamma; 
-args->batch_normalization_27_gamma_bytes = 0; 
-args->batch_normalization_27_beta = batch_normalization_27_beta; 
-args->batch_normalization_27_beta_bytes = 0; 
-args->batch_normalization_27_mean = batch_normalization_27_mean; 
-args->batch_normalization_27_mean_bytes = 0; 
-args->batch_normalization_27_variance = batch_normalization_27_variance; 
-args->batch_normalization_27_variance_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = args->input; // args is already a RootIn*, no cast needed
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
-computeAccuracy2(labels, 10000, result);
-return 0; 
-
-} 
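Taken together, the deleted main() is an instance of HPVM's standard host launch protocol: initialize the runtime, bind every tensor into a heap-allocated RootIn with byte-counts of 0, launch the generated root node, wait for it, page the result tensor back to the host, clean up, and score the output. A condensed sketch of that sequence, assuming the HPVM/VISC host headers that declare these intrinsics and the generated root function:

    #include <cstdint>
    #include <cstdlib>

    // Condensed from the deleted driver; `root`, the __visc__* intrinsics,
    // hpvm_request_tensor, and computeAccuracy2 come from the surrounding project.
    int runMobileNet(void *input, uint8_t *labels) {
      __visc__init();
      RootIn *args = static_cast<RootIn *>(malloc(sizeof(RootIn)));
      args->input = input;
      args->input_bytes = 0;                  // sizes are tracked by the runtime
      /* ... bind each weight and its _bytes field exactly as above ... */
      void *dfg = __visc__launch(0, root, (void *)args);
      __visc__wait(dfg);                      // block until the graph finishes
      void *result = args->input;             // this driver returns via the input slot
      hpvm_request_tensor(result, 0);         // copy the tensor back to the host
      __visc__cleanup();
      computeAccuracy2(labels, 10000, result);
      return 0;
    }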
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_beta.bin
deleted file mode 100644
index bb1eb07a8e262d2f4d941578fd4c19d6a90c7562..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_gamma.bin
deleted file mode 100644
index 931c8925b89f363a41d3cf81483bde60abafba61..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_mean.bin
deleted file mode 100644
index 633bdc9fd4a9ef052ca8b6ab488a156002e3d4b5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_variance.bin
deleted file mode 100644
index f92c73f59eb5eb35ca94e3ce006e5f3c4f60ecef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_10_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_beta.bin
deleted file mode 100644
index 5918477d3638e851c3fdfc47dc550cea3afa7d50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_gamma.bin
deleted file mode 100644
index 6b3d705199383135bed811a6fdaa237d754487bd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_mean.bin
deleted file mode 100644
index 965edb6440d48ce4b9abc68cd3b9eb1d3f9cf3da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_variance.bin
deleted file mode 100644
index a7a4b16bd7e581a4fdf1819ec0484559febd1fca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_beta.bin
deleted file mode 100644
index 8ade4cf080d7d3228e752d284ed500ba6300d261..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_gamma.bin
deleted file mode 100644
index 6dfb7c3833821b29f9230df806c4abc0c16a7b59..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_mean.bin
deleted file mode 100644
index 8899c2ad8395a98c752b1777095018cc90ca693b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_variance.bin
deleted file mode 100644
index 9206092b5ee7fa6178bb9109a9aabd5dbfaa7ccf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_beta.bin
deleted file mode 100644
index 0f5fe8656435b28ec4b928af599b0a63915a651a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_gamma.bin
deleted file mode 100644
index c79d7d0b02b65ea9953bfd1fa164773f96e5ade0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_mean.bin
deleted file mode 100644
index 2a6d471779cb2634718545d33827ca1d8d023c07..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_variance.bin
deleted file mode 100644
index 5a2e2c8ca3645c6115b341b71141029d25064f18..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_beta.bin
deleted file mode 100644
index 79948d2a5e40f633e6675c9c8c98f186a3ae2626..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_gamma.bin
deleted file mode 100644
index 2f9a59ae913b2fcf4ef44018e295a055ea357d45..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_mean.bin
deleted file mode 100644
index 278d39b1a67c00a4015d2687ab936ddd4cbc6e34..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_variance.bin
deleted file mode 100644
index 4749c1a52d14caccf7df518ad56f2c03901dcf1a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_14_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_beta.bin
deleted file mode 100644
index 27f1a01dee6e2c9631ef312015fca880f8aa7b99..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_gamma.bin
deleted file mode 100644
index 0fe3148783c75679668beae35231fa2eb0308a8a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_mean.bin
deleted file mode 100644
index 9701d55c3d49a2d4ee43a45dad07886d62591653..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_variance.bin
deleted file mode 100644
index f679da9df83af326cc3d886528c298157ffbb561..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_15_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_beta.bin
deleted file mode 100644
index c2802a0da57a45a0839b9896a3dd0a9a70b8e669..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_gamma.bin
deleted file mode 100644
index f94cebe4a7af3a4c840c2f8b9bbb9a1ee7cb5b29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_mean.bin
deleted file mode 100644
index a6d415f6dfd476fe1fd620794230c6d289158f50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_variance.bin
deleted file mode 100644
index efa5fcfd7916e86848227806134efd7b4ec1e55e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_16_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_beta.bin
deleted file mode 100644
index 41201773cfd82292ab63ade568191ed261648538..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_gamma.bin
deleted file mode 100644
index 87613f6bc687bd539da0dd3fbda58e19a3e4071c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_mean.bin
deleted file mode 100644
index dee72d911fc96d785150d99101faac2905c61bb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_variance.bin
deleted file mode 100644
index 86732c56ca1d6fa38ed0ccd379a26a7756816f7b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_17_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_beta.bin
deleted file mode 100644
index c520fdc378129c16c3c7ab8772faea68e00fd4f7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_gamma.bin
deleted file mode 100644
index 1aec3276306988ccd80ab907faba7538170d6e0e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_mean.bin
deleted file mode 100644
index cf9f6a04871515eae7a1aee7c9d103ca13bc8aae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_variance.bin
deleted file mode 100644
index 7b46f134cd68995d45a2baab62188fd775e4ae82..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_18_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_beta.bin
deleted file mode 100644
index a4a7d99bc7b4c8f1a0d5dbdc4385036d01586d33..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_gamma.bin
deleted file mode 100644
index 60ea687e491464d474868e42dfc21ce1cd67961d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_mean.bin
deleted file mode 100644
index 2d9c9ef86608e1af225cd46ddd07d3a2bb9d5853..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_variance.bin
deleted file mode 100644
index f4e2ef2b5ae595944b6d2a4191594a2029508b1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_19_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_beta.bin
deleted file mode 100644
index d6a711c22f8e5e9b9df5fe17fec24e12d35c20cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_gamma.bin
deleted file mode 100644
index 9565d3b2a5ed07f2017c79534d689a729160ca46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_mean.bin
deleted file mode 100644
index f552c5162cd4d3d2ed8c0edf098c2a9adbb403fd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_variance.bin
deleted file mode 100644
index 715fe55fd43af30b967ade11301595dd051a7770..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_1_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_beta.bin
deleted file mode 100644
index 5291d00818ecc56eb039c71ed86d1a8e7e0f03a5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_gamma.bin
deleted file mode 100644
index 0ac1e2c1fa63ce2deb08f1b7a5aacd925749385b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_mean.bin
deleted file mode 100644
index f183a0ee683d40cc26247a32963e6321f85e7688..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_variance.bin
deleted file mode 100644
index 1d9fac8cdd2e32c1e821deaef3ad2a6bcd4cbdb9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_20_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_beta.bin
deleted file mode 100644
index 393f76218be9548b415c5b1a43a3c63a302b7300..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_gamma.bin
deleted file mode 100644
index 8b84922da7063fb41b68d983475c4c9bf91a2ac1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_mean.bin
deleted file mode 100644
index 78f070dc6515294f189e0b71692e4f61981608fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_variance.bin
deleted file mode 100644
index e2e11c338fb2ea2a00d3aae3798ca3a2fdb82a1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_21_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_beta.bin
deleted file mode 100644
index bf38673377e42584d82b848299c7bfb531655de5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_gamma.bin
deleted file mode 100644
index fd397b675a9a5da3fc1174a2f56f84ef3d67a8e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_mean.bin
deleted file mode 100644
index 13549710237f51a5a9c84abf6272275396fff888..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_variance.bin
deleted file mode 100644
index 8102a808657f0b45d3a2a959bb3793c24f0c14ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_22_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_beta.bin
deleted file mode 100644
index c396a8e2939c25d30b2021e6ca343913021309f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_gamma.bin
deleted file mode 100644
index 0ee822b7e19677f3b7f7fcfce5456c2b1082efd7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_mean.bin
deleted file mode 100644
index fbf6f4eac60ed424271646218cb74ddaa5d74104..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_variance.bin
deleted file mode 100644
index d630a7ac1ecc23cfaeb1c88311dd6e5c6c4bbdbc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_23_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_beta.bin
deleted file mode 100644
index 3c70dadf33fe75b4e62ad704c6e4eebfe726792a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_gamma.bin
deleted file mode 100644
index 09cd79dc17aea4d5c5b6c604248a81d929170e45..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_mean.bin
deleted file mode 100644
index cbf013bcb470738d762c2cbda76745bf80ec765b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_variance.bin
deleted file mode 100644
index 0039d0bad928dee087c70a587d0e5a843790e077..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_24_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_beta.bin
deleted file mode 100644
index 0c9f7ae71b66a85ed843a45703717064be84a64c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_gamma.bin
deleted file mode 100644
index 8ae7623c12452151e9a4b100cd344f9b46121bab..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_mean.bin
deleted file mode 100644
index 062398cda6d3315629ee845e1bdd7d4623bc7493..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_variance.bin
deleted file mode 100644
index 0b5029b6aba8673c6fd7a9844c0feb4b8d7da490..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_25_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_beta.bin
deleted file mode 100644
index 1edd9d65782ee53219b97efd095a0d31af296d06..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_gamma.bin
deleted file mode 100644
index f9885c71b64218be5ce4187a9306e1869c41b5fc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_mean.bin
deleted file mode 100644
index 9d34da9b2aae4e306e7061e380168ac6bc0f7a00..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_variance.bin
deleted file mode 100644
index 2bd6648fa7d61af054f9d36916cc1975f3f351ae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_26_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_beta.bin
deleted file mode 100644
index e6b513e4055d1394fe9eb9437b00864d570780aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_gamma.bin
deleted file mode 100644
index 0349ab56289301dbc5d95375e0a553afb8cc8cf6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_mean.bin
deleted file mode 100644
index 8ae8e0fc3d161ef33ebd15cbdc620863332e8216..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_variance.bin
deleted file mode 100644
index 602be2e5a92239d688e30a082d79f8bec599c27f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_27_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_beta.bin
deleted file mode 100644
index c9af5d00060958d9ce8073e95c74483ba63bcbec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_gamma.bin
deleted file mode 100644
index 59b78a30bf741b86e7bcd8346981f76749c2a981..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_mean.bin
deleted file mode 100644
index faa537236ff696e81e93fdcffef78e86c66ead9f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_variance.bin
deleted file mode 100644
index 9a9ec730a4aabf7b35e502daca5dfe0dbf113418..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_beta.bin
deleted file mode 100644
index dfbcff725a71852e107a04917d0a65a3544604e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_gamma.bin
deleted file mode 100644
index ded64a0e5a70a9155c377e8a8244b85f623dee46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_mean.bin
deleted file mode 100644
index 058394e6ac8c95cec8fb6050daf47289e8c81b48..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_variance.bin
deleted file mode 100644
index d5dba0a9275910fdded47a2604453ae46f611c16..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_3_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_beta.bin
deleted file mode 100644
index 70ddacf8f0bd27523892f5af52ded3302c4715d4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_gamma.bin
deleted file mode 100644
index 3f64ef0b25bb6e00a6012f360e65812d22ca672f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_mean.bin
deleted file mode 100644
index 28c78d2db90aadc66f0d1f7d647e32044fd12744..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_variance.bin
deleted file mode 100644
index 8f361cbf915cd5fb93f32847280d50dad8e9b791..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_beta.bin
deleted file mode 100644
index 37161ae89f38c6489ae9ed0d99ad2df5a5f2f093..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_gamma.bin
deleted file mode 100644
index efefd0af2fbdc436d3321906166debd0323c1571..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_mean.bin
deleted file mode 100644
index 7eb215a96c6fb385ec761cf16be0339f3656b717..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_variance.bin
deleted file mode 100644
index d220b9e27ad8cd5a7b4bcba39105c8ee969bc4f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_beta.bin
deleted file mode 100644
index 39a7a8779dc5ba6a394748a88391fbbf8b35ec23..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_gamma.bin
deleted file mode 100644
index 8bfc97196078b732c1ab61e8a3bbb656d29d3728..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_mean.bin
deleted file mode 100644
index f427d142f3bf2147d302426700b2f0ee817ec308..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_variance.bin
deleted file mode 100644
index 4c571acca77f147260874e9ae0ff1722076746ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_beta.bin
deleted file mode 100644
index 4e72081f35c879ebc0d0bc57e3ced79a81200854..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_gamma.bin
deleted file mode 100644
index e8ac9fe5f793a80b78c9a2099d37a96d093097ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_mean.bin
deleted file mode 100644
index 42ec4b5d965a8dc26c8d6218195e1c87739fb9fa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_variance.bin
deleted file mode 100644
index 17911f473710c3e37246c1de1a4121be21585ee2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_beta.bin
deleted file mode 100644
index c3b0b374f59d9c906906d51621a99704e26ed422..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_gamma.bin
deleted file mode 100644
index b271fb02201a3f354162e281cf1bac5998ed28a2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_mean.bin
deleted file mode 100644
index 5888235eb54a3c9ad548a51708eb39c13d7e8ddd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_variance.bin
deleted file mode 100644
index be017b25adccfc236b22789abd11b0ff50fb5a40..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_beta.bin
deleted file mode 100644
index 13e7e2a820d8c80f79e05b91540c0d5493387306..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_gamma.bin
deleted file mode 100644
index 4d65230c8dc292bceb2414527469eca65674af13..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_mean.bin
deleted file mode 100644
index 67b8b25e4fff4232001931073a803f3dfe363187..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_variance.bin
deleted file mode 100644
index 59a0b1e0d59434dfb9d94f4cefdcfab4cdec0b93..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_10_w.bin
deleted file mode 100644
index 2000dbf19acd71e28da72db217f7f34d80be4d55..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_11_w.bin
deleted file mode 100644
index e38c7f59fa6346b7a4c1c2e676cec648277986aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_12_w.bin
deleted file mode 100644
index fd7b6121bdd5b28f0c65caec9e90676d9ccc2171..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_13_w.bin
deleted file mode 100644
index 2a6a844fa8e1ee98017c3d1e3a9024f39c6f1568..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_14_w.bin
deleted file mode 100644
index ff22cedb2ef6ef7aaffbf434d5dae78cf813de27..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_1_w.bin
deleted file mode 100644
index bafe4f5ad48926ac6a00086e2e9ce2cda85bd9ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_2_w.bin
deleted file mode 100644
index eff0fc063670e2a30c86b70b2611787f454db6fb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_3_w.bin
deleted file mode 100644
index e09cda44638fd9f0032b47d6f5fc7ece69cd24b8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_4_w.bin
deleted file mode 100644
index ce941bc4965f21e57f6b6cab24639d8bab593b6e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_5_w.bin
deleted file mode 100644
index 12a7e35468d1d003b9f65b4a515f82c4a2f42ca6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_6_w.bin
deleted file mode 100644
index 15c80714155c176c53788c7a4926ae90d6a50a54..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_7_w.bin
deleted file mode 100644
index aabaa5eb3ce76dba62573d51d7b63d037df1ce82..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_8_w.bin
deleted file mode 100644
index ad954d098872fcf34792606a50d7e46c6a0008c6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_9_w.bin
deleted file mode 100644
index 50ea54350fc605740424c8b6e5a48cbe7846181b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_b.bin
deleted file mode 100644
index 1e697e20d8008cba5750a47aa9a53d8b29b1b0e2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_w.bin
deleted file mode 100644
index 9105f0e8d7739016cce69125dee5e8102d67c8d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_10_w.bin
deleted file mode 100644
index f7cbc07e8ef10d1c910e8cb8e0880a263f944d4e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_11_w.bin
deleted file mode 100644
index c9fb2daae05c1272ee93cf8dfd817e08591834e1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_12_w.bin
deleted file mode 100644
index 58c263417c0669304fff4416cd7c45dc001d4f81..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_13_w.bin
deleted file mode 100644
index 36d45717f5a1435df7c2cecca1353ca326ea98f9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_1_w.bin
deleted file mode 100644
index 0224a1a1465811bf5768565cc637a9757e8db9c2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_2_w.bin
deleted file mode 100644
index 33c3af23f2fee0a9bd871d3e95c26d17b7108c29..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_3_w.bin
deleted file mode 100644
index 1bcfbd7df4591bde2936e7ccfa9b1f10cf9f0d1e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_4_w.bin
deleted file mode 100644
index 49a61f541371dd83a76c5efa90cd9ec3eaa13de0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_5_w.bin
deleted file mode 100644
index d488d6077e6a7e13a9bf8fbd9eb67fa735d6befe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_6_w.bin
deleted file mode 100644
index 7ab35e18d4824343230e241e3c6ecfcc20b57b83..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_7_w.bin
deleted file mode 100644
index 569a5573a4f9a5a3f7fb87361b30f361abcff2cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_8_w.bin
deleted file mode 100644
index 10dc6502f6d0c128cdeae1fd07359be2bc500981..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_9_w.bin
deleted file mode 100644
index 9112cb3cc2eb816e5e3592b00cd331c23b185b1d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/depthwise_conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/input.bin
deleted file mode 100644
index 7a6fbc28f5a947a90863278a5249303f9f52741b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layer_composition.txt
deleted file mode 100644
index 10692997a90e4490a91ad3d0e6e04285754144fd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layer_composition.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-activation  
-pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layers.txt
deleted file mode 100644
index 0bd2b554374c10d748a652f52e5427c716be0084..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/layers.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorBatchNorm1
-#tensorRelu1
-#tensorDepthwiseConv1
-#tensorBatchNorm2
-#tensorRelu2
-Conv2,10000,32,32,32,64,32,1,1
-#tensorBatchNorm3
-#tensorRelu3
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu4
-Conv3,10000,64,16,16,128,64,1,1
-#tensorBatchNorm5
-#tensorRelu5
-#tensorDepthwiseConv3
-#tensorBatchNorm6
-#tensorRelu6
-Conv4,10000,128,16,16,128,128,1,1
-#tensorBatchNorm7
-#tensorRelu7
-#tensorDepthwiseConv4
-#tensorBatchNorm8
-#tensorRelu8
-Conv5,10000,128,8,8,256,128,1,1
-#tensorBatchNorm9
-#tensorRelu9
-#tensorDepthwiseConv5
-#tensorBatchNorm10
-#tensorRelu10
-Conv6,10000,256,8,8,256,256,1,1
-#tensorBatchNorm11
-#tensorRelu11
-#tensorDepthwiseConv6
-#tensorBatchNorm12
-#tensorRelu12
-Conv7,10000,256,4,4,512,256,1,1
-#tensorBatchNorm13
-#tensorRelu13
-#tensorDepthwiseConv7
-#tensorBatchNorm14
-#tensorRelu14
-Conv8,10000,512,4,4,512,512,1,1
-#tensorBatchNorm15
-#tensorRelu15
-#tensorDepthwiseConv8
-#tensorBatchNorm16
-#tensorRelu16
-Conv9,10000,512,4,4,512,512,1,1
-#tensorBatchNorm17
-#tensorRelu17
-#tensorDepthwiseConv9
-#tensorBatchNorm18
-#tensorRelu18
-Conv10,10000,512,4,4,512,512,1,1
-#tensorBatchNorm19
-#tensorRelu19
-#tensorDepthwiseConv10
-#tensorBatchNorm20
-#tensorRelu20
-Conv11,10000,512,4,4,512,512,1,1
-#tensorBatchNorm21
-#tensorRelu21
-#tensorDepthwiseConv11
-#tensorBatchNorm22
-#tensorRelu22
-Conv12,10000,512,4,4,512,512,1,1
-#tensorBatchNorm23
-#tensorRelu23
-#tensorDepthwiseConv12
-#tensorBatchNorm24
-#tensorRelu24
-Conv13,10000,512,2,2,1024,512,1,1
-#tensorBatchNorm25
-#tensorRelu25
-#tensorDepthwiseConv13
-#tensorBatchNorm26
-#tensorRelu26
-Conv14,10000,1024,2,2,1024,1024,1,1
-#tensorBatchNorm27
-#tensorRelu27
-#tensorPooling1
-FC1,10000,1024,1024,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/promise_src.cc
deleted file mode 100644
index 146bc640cc4b1e8da65e3e7bb6cb5c7f2a007399..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/promise_src.cc
+++ /dev/null
@@ -1,420 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -2.196306920051575, 1.347581704139706, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -60.89275047302246, 51.99256916046146, 9); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_5 = tensorRelu(var_4); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 5.713541553974245, conv2d_2_w, -0.9317721160650253, 1.0774258937835774, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.518589503288269, 6.810842518806449, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.932139402866376, conv2d_3_w, -0.5316544661521911, 0.5753790403604531, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.482631235122681, 3.96730119752885, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.103263397693674, conv2d_4_w, -0.36234098821878435, 0.4076913900375366, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.04261828327179, 3.88677932929993, 9); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 5.383221302509475, conv2d_5_w, -0.3131200549006462, 0.29357679939270065, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.921469215393066, 4.338679324150087, 9); 
-void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_29 = tensorRelu(var_28); 
-void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 4.316738154411368, conv2d_6_w, -0.23299247801303866, 0.2580290257930756, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.207789947509766, 3.932436970710759, 9); 
-void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_32 = tensorRelu(var_31); 
-void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 5.830408106803901, conv2d_7_w, -0.20233777219057084, 0.18998308175802117, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.298286915779113, 4.848135117530843, 9); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorConvolution(var_38, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-void* var_40 = tensorBatchNorm(var_39, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_41 = tensorRelu(var_40); 
-void* var_42 = ConvLayer_PROMISE(var_41, 0.0, 4.446417809963227, conv2d_8_w, -0.17442735651135444, 0.17695830866694454, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.347910885810852, 3.6144364695549145, 9); 
-void* var_43 = tensorBatchNorm(var_42, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_44 = tensorRelu(var_43); 
-void* var_45 = tensorConvolution(var_44, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-void* var_46 = tensorBatchNorm(var_45, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_47 = tensorRelu(var_46); 
-void* var_48 = ConvLayer_PROMISE(var_47, 0.0, 4.518095604896667, conv2d_9_w, -0.14546796187758446, 0.15256431668996823, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.0287702755928043, 2.9487365779876953, 9); 
-void* var_49 = tensorBatchNorm(var_48, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorConvolution(var_50, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_54 = ConvLayer_PROMISE(var_53, 0.0, 6.348575634956407, conv2d_10_w, -0.13025874522328376, 0.13558243343234128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.2293100805282595, 3.5315046372413645, 9); 
-void* var_55 = tensorBatchNorm(var_54, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_56 = tensorRelu(var_55); 
-void* var_57 = tensorConvolution(var_56, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-void* var_58 = tensorBatchNorm(var_57, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_59 = tensorRelu(var_58); 
-void* var_60 = ConvLayer_PROMISE(var_59, 0.0, 5.221003110408843, conv2d_11_w, -0.11900172759592534, 0.12536374783515936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.038203780174255, 4.004009407043483, 9); 
-void* var_61 = tensorBatchNorm(var_60, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_62 = tensorRelu(var_61); 
-void* var_63 = tensorConvolution(var_62, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_65 = tensorRelu(var_64); 
-void* var_66 = ConvLayer_PROMISE(var_65, 0.0, 5.732498347759442, conv2d_12_w, -0.10839721685647964, 0.11625668607652187, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.3111015114784244, 4.462933233261136, 9); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_69 = tensorConvolution(var_68, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-void* var_70 = tensorBatchNorm(var_69, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_71 = tensorRelu(var_70); 
-void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 7.240498211860681, conv2d_13_w, -0.08623744961619377, 0.08859449951350662, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.175431394577027, 6.2043294754027345, 9); 
-void* var_73 = tensorBatchNorm(var_72, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_74 = tensorRelu(var_73); 
-void* var_75 = tensorConvolution(var_74, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-void* var_76 = tensorBatchNorm(var_75, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_77 = tensorRelu(var_76); 
-void* var_78 = ConvLayer_PROMISE(var_77, 0.0, 7.813958834648251, conv2d_14_w, -0.06813025139272214, 0.07002027779817581, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.920566423416137, 2.6442912578582534, 9); 
-void* var_79 = tensorBatchNorm(var_78, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_80 = tensorRelu(var_79); 
-void* var_81 = tensorPooling(var_80,1,2,2,0,0,2,2); 
-void* var_82 = FCLayer_PROMISE(var_81, 0.0, 2.8692066650391013, dense_1_w, -0.22301019695401192, 0.1442659378200768, dense_1_b, -0.1654396, 0.23336112, -1, -12.245949958801269, 23.80532513427739, 9); 
-void* var_83 = tensorSoftmax(var_82); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_83); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/src.cc
deleted file mode 100644
index 25aec9bde3bc1aac157e2acc368dcddf866e455d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_quant/src.cc
+++ /dev/null
@@ -1,413 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("data/mobilenet_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,512,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,512,1,3,3); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,1024,512,1,1); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,1024,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,1024,1,3,3); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,1024,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,1024,1024,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,1024,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,1024,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,1024,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_9 = tensorRelu(var_8); 
-void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_13 = tensorRelu(var_12); 
-void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_16 = tensorRelu(var_15); 
-void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_28 = tensorRelu(var_27); 
-void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_43 = tensorRelu(var_42); 
-void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_46 = tensorRelu(var_45); 
-void* var_48 = tensorConvolution(var_46, depthwise_conv2d_7_w, 1, 1, 1, 1, 1, 512); 
-void* var_49 = tensorBatchNorm(var_48, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorConvolution(var_50, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_55 = tensorConvolution(var_53, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 512); 
-void* var_56 = tensorBatchNorm(var_55, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_57 = tensorRelu(var_56); 
-void* var_58 = tensorConvolution(var_57, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-void* var_59 = tensorBatchNorm(var_58, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_60 = tensorRelu(var_59); 
-void* var_63 = tensorConvolution(var_60, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 512); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_65 = tensorRelu(var_64); 
-void* var_66 = tensorConvolution(var_65, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_70 = tensorConvolution(var_68, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 512); 
-void* var_71 = tensorBatchNorm(var_70, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_72 = tensorRelu(var_71); 
-void* var_73 = tensorConvolution(var_72, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-void* var_74 = tensorBatchNorm(var_73, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_75 = tensorRelu(var_74); 
-void* var_77 = tensorConvolution(var_75, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 512); 
-void* var_78 = tensorBatchNorm(var_77, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_79 = tensorRelu(var_78); 
-void* var_80 = tensorConvolution(var_79, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-void* var_81 = tensorBatchNorm(var_80, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_82 = tensorRelu(var_81); 
-void* var_85 = tensorConvolution(var_82, depthwise_conv2d_12_w, 1, 1, 2, 2, 1, 512); 
-void* var_86 = tensorBatchNorm(var_85, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_87 = tensorRelu(var_86); 
-void* var_88 = tensorConvolution(var_87, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-void* var_89 = tensorBatchNorm(var_88, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_90 = tensorRelu(var_89); 
-void* var_92 = tensorConvolution(var_90, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 1024); 
-void* var_93 = tensorBatchNorm(var_92, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_94 = tensorRelu(var_93); 
-void* var_95 = tensorConvolution(var_94, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-void* var_96 = tensorBatchNorm(var_95, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_97 = tensorRelu(var_96); 
-void* var_99 = tensorPooling(var_97,1,2,2,0,0,2,2); 
-void* var_101 = tensorGemmGPU(var_99, dense_1_w); 
-void* var_102 = tensorAdd(var_101, dense_1_b); 
-void* var_103 = tensorSoftmax(var_102); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_103); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/approxhpvm_src.cc
deleted file mode 100644
index dc0c873c63333299981591cb5654cb38be9d4092..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/approxhpvm_src.cc
+++ /dev/null
@@ -1,1224 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 32); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 64); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_avg(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* batch_normalization_1_gamma, size_t batch_normalization_1_gamma_bytes, 
-	  void* batch_normalization_1_beta, size_t batch_normalization_1_beta_bytes, 
-	  void* batch_normalization_1_mean, size_t batch_normalization_1_mean_bytes, 
-	  void* batch_normalization_1_variance, size_t batch_normalization_1_variance_bytes, 
-	  void* depthwise_conv2d_1_w, size_t depthwise_conv2d_1_w_bytes, 
-	  void* batch_normalization_2_gamma, size_t batch_normalization_2_gamma_bytes, 
-	  void* batch_normalization_2_beta, size_t batch_normalization_2_beta_bytes, 
-	  void* batch_normalization_2_mean, size_t batch_normalization_2_mean_bytes, 
-	  void* batch_normalization_2_variance, size_t batch_normalization_2_variance_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* batch_normalization_3_gamma, size_t batch_normalization_3_gamma_bytes, 
-	  void* batch_normalization_3_beta, size_t batch_normalization_3_beta_bytes, 
-	  void* batch_normalization_3_mean, size_t batch_normalization_3_mean_bytes, 
-	  void* batch_normalization_3_variance, size_t batch_normalization_3_variance_bytes, 
-	  void* depthwise_conv2d_2_w, size_t depthwise_conv2d_2_w_bytes, 
-	  void* batch_normalization_4_gamma, size_t batch_normalization_4_gamma_bytes, 
-	  void* batch_normalization_4_beta, size_t batch_normalization_4_beta_bytes, 
-	  void* batch_normalization_4_mean, size_t batch_normalization_4_mean_bytes, 
-	  void* batch_normalization_4_variance, size_t batch_normalization_4_variance_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* batch_normalization_5_gamma, size_t batch_normalization_5_gamma_bytes, 
-	  void* batch_normalization_5_beta, size_t batch_normalization_5_beta_bytes, 
-	  void* batch_normalization_5_mean, size_t batch_normalization_5_mean_bytes, 
-	  void* batch_normalization_5_variance, size_t batch_normalization_5_variance_bytes, 
-	  void* depthwise_conv2d_3_w, size_t depthwise_conv2d_3_w_bytes, 
-	  void* batch_normalization_6_gamma, size_t batch_normalization_6_gamma_bytes, 
-	  void* batch_normalization_6_beta, size_t batch_normalization_6_beta_bytes, 
-	  void* batch_normalization_6_mean, size_t batch_normalization_6_mean_bytes, 
-	  void* batch_normalization_6_variance, size_t batch_normalization_6_variance_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* batch_normalization_7_gamma, size_t batch_normalization_7_gamma_bytes, 
-	  void* batch_normalization_7_beta, size_t batch_normalization_7_beta_bytes, 
-	  void* batch_normalization_7_mean, size_t batch_normalization_7_mean_bytes, 
-	  void* batch_normalization_7_variance, size_t batch_normalization_7_variance_bytes, 
-	  void* depthwise_conv2d_4_w, size_t depthwise_conv2d_4_w_bytes, 
-	  void* batch_normalization_8_gamma, size_t batch_normalization_8_gamma_bytes, 
-	  void* batch_normalization_8_beta, size_t batch_normalization_8_beta_bytes, 
-	  void* batch_normalization_8_mean, size_t batch_normalization_8_mean_bytes, 
-	  void* batch_normalization_8_variance, size_t batch_normalization_8_variance_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* batch_normalization_9_gamma, size_t batch_normalization_9_gamma_bytes, 
-	  void* batch_normalization_9_beta, size_t batch_normalization_9_beta_bytes, 
-	  void* batch_normalization_9_mean, size_t batch_normalization_9_mean_bytes, 
-	  void* batch_normalization_9_variance, size_t batch_normalization_9_variance_bytes, 
-	  void* depthwise_conv2d_5_w, size_t depthwise_conv2d_5_w_bytes, 
-	  void* batch_normalization_10_gamma, size_t batch_normalization_10_gamma_bytes, 
-	  void* batch_normalization_10_beta, size_t batch_normalization_10_beta_bytes, 
-	  void* batch_normalization_10_mean, size_t batch_normalization_10_mean_bytes, 
-	  void* batch_normalization_10_variance, size_t batch_normalization_10_variance_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* batch_normalization_11_gamma, size_t batch_normalization_11_gamma_bytes, 
-	  void* batch_normalization_11_beta, size_t batch_normalization_11_beta_bytes, 
-	  void* batch_normalization_11_mean, size_t batch_normalization_11_mean_bytes, 
-	  void* batch_normalization_11_variance, size_t batch_normalization_11_variance_bytes, 
-	  void* depthwise_conv2d_6_w, size_t depthwise_conv2d_6_w_bytes, 
-	  void* batch_normalization_12_gamma, size_t batch_normalization_12_gamma_bytes, 
-	  void* batch_normalization_12_beta, size_t batch_normalization_12_beta_bytes, 
-	  void* batch_normalization_12_mean, size_t batch_normalization_12_mean_bytes, 
-	  void* batch_normalization_12_variance, size_t batch_normalization_12_variance_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* batch_normalization_13_gamma, size_t batch_normalization_13_gamma_bytes, 
-	  void* batch_normalization_13_beta, size_t batch_normalization_13_beta_bytes, 
-	  void* batch_normalization_13_mean, size_t batch_normalization_13_mean_bytes, 
-	  void* batch_normalization_13_variance, size_t batch_normalization_13_variance_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(68, input, conv2d_1_w, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, depthwise_conv2d_1_w, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, conv2d_2_w, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, depthwise_conv2d_2_w, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, conv2d_3_w, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, depthwise_conv2d_3_w, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, conv2d_4_w, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, depthwise_conv2d_4_w, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, conv2d_5_w, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, depthwise_conv2d_5_w, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, conv2d_6_w, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, depthwise_conv2d_6_w, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, conv2d_7_w, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-  __visc__bindIn(var_1, 6, 4, 0); 
-  __visc__bindIn(var_1, 7, 5, 0); 
-  __visc__bindIn(var_1, 8, 6, 0); 
-  __visc__bindIn(var_1, 9, 7, 0); 
-  __visc__bindIn(var_1, 10, 8, 0); 
-  __visc__bindIn(var_1, 11, 9, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 12, 2, 0); 
-  __visc__bindIn(var_3, 13, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 14, 2, 0); 
-  __visc__bindIn(var_4, 15, 3, 0); 
-  __visc__bindIn(var_4, 16, 4, 0); 
-  __visc__bindIn(var_4, 17, 5, 0); 
-  __visc__bindIn(var_4, 18, 6, 0); 
-  __visc__bindIn(var_4, 19, 7, 0); 
-  __visc__bindIn(var_4, 20, 8, 0); 
-  __visc__bindIn(var_4, 21, 9, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-  __visc__bindIn(var_6, 22, 2, 0); 
-  __visc__bindIn(var_6, 23, 3, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 24, 2, 0); 
-  __visc__bindIn(var_7, 25, 3, 0); 
-  __visc__bindIn(var_7, 26, 4, 0); 
-  __visc__bindIn(var_7, 27, 5, 0); 
-  __visc__bindIn(var_7, 28, 6, 0); 
-  __visc__bindIn(var_7, 29, 7, 0); 
-  __visc__bindIn(var_7, 30, 8, 0); 
-  __visc__bindIn(var_7, 31, 9, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 32, 2, 0); 
-  __visc__bindIn(var_9, 33, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 34, 2, 0); 
-  __visc__bindIn(var_10, 35, 3, 0); 
-  __visc__bindIn(var_10, 36, 4, 0); 
-  __visc__bindIn(var_10, 37, 5, 0); 
-  __visc__bindIn(var_10, 38, 6, 0); 
-  __visc__bindIn(var_10, 39, 7, 0); 
-  __visc__bindIn(var_10, 40, 8, 0); 
-  __visc__bindIn(var_10, 41, 9, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 42, 2, 0); 
-  __visc__bindIn(var_12, 43, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-  __visc__bindIn(var_13, 44, 2, 0); 
-  __visc__bindIn(var_13, 45, 3, 0); 
-  __visc__bindIn(var_13, 46, 4, 0); 
-  __visc__bindIn(var_13, 47, 5, 0); 
-  __visc__bindIn(var_13, 48, 6, 0); 
-  __visc__bindIn(var_13, 49, 7, 0); 
-  __visc__bindIn(var_13, 50, 8, 0); 
-  __visc__bindIn(var_13, 51, 9, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 52, 2, 0); 
-  __visc__bindIn(var_15, 53, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-  __visc__bindIn(var_16, 54, 2, 0); 
-  __visc__bindIn(var_16, 55, 3, 0); 
-  __visc__bindIn(var_16, 56, 4, 0); 
-  __visc__bindIn(var_16, 57, 5, 0); 
-  __visc__bindIn(var_16, 58, 6, 0); 
-  __visc__bindIn(var_16, 59, 7, 0); 
-  __visc__bindIn(var_16, 60, 8, 0); 
-  __visc__bindIn(var_16, 61, 9, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 62, 2, 0); 
-  __visc__bindIn(var_18, 63, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-  __visc__bindIn(var_19, 64, 2, 0); 
-  __visc__bindIn(var_19, 65, 3, 0); 
-  __visc__bindIn(var_19, 66, 4, 0); 
-  __visc__bindIn(var_19, 67, 5, 0); 
-  __visc__bindIn(var_19, 68, 6, 0); 
-  __visc__bindIn(var_19, 69, 7, 0); 
-  __visc__bindIn(var_19, 70, 8, 0); 
-  __visc__bindIn(var_19, 71, 9, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 72, 2, 0); 
-  __visc__bindIn(var_21, 73, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-  __visc__bindIn(var_22, 74, 2, 0); 
-  __visc__bindIn(var_22, 75, 3, 0); 
-  __visc__bindIn(var_22, 76, 4, 0); 
-  __visc__bindIn(var_22, 77, 5, 0); 
-  __visc__bindIn(var_22, 78, 6, 0); 
-  __visc__bindIn(var_22, 79, 7, 0); 
-  __visc__bindIn(var_22, 80, 8, 0); 
-  __visc__bindIn(var_22, 81, 9, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 82, 2, 0); 
-  __visc__bindIn(var_24, 83, 3, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-  __visc__bindIn(var_25, 84, 2, 0); 
-  __visc__bindIn(var_25, 85, 3, 0); 
-  __visc__bindIn(var_25, 86, 4, 0); 
-  __visc__bindIn(var_25, 87, 5, 0); 
-  __visc__bindIn(var_25, 88, 6, 0); 
-  __visc__bindIn(var_25, 89, 7, 0); 
-  __visc__bindIn(var_25, 90, 8, 0); 
-  __visc__bindIn(var_25, 91, 9, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 92, 2, 0); 
-  __visc__bindIn(var_27, 93, 3, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-  __visc__bindIn(var_28, 94, 2, 0); 
-  __visc__bindIn(var_28, 95, 3, 0); 
-  __visc__bindIn(var_28, 96, 4, 0); 
-  __visc__bindIn(var_28, 97, 5, 0); 
-  __visc__bindIn(var_28, 98, 6, 0); 
-  __visc__bindIn(var_28, 99, 7, 0); 
-  __visc__bindIn(var_28, 100, 8, 0); 
-  __visc__bindIn(var_28, 101, 9, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 102, 2, 0); 
-  __visc__bindIn(var_30, 103, 3, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 104, 2, 0); 
-  __visc__bindIn(var_31, 105, 3, 0); 
-  __visc__bindIn(var_31, 106, 4, 0); 
-  __visc__bindIn(var_31, 107, 5, 0); 
-  __visc__bindIn(var_31, 108, 6, 0); 
-  __visc__bindIn(var_31, 109, 7, 0); 
-  __visc__bindIn(var_31, 110, 8, 0); 
-  __visc__bindIn(var_31, 111, 9, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-  __visc__bindIn(var_33, 112, 2, 0); 
-  __visc__bindIn(var_33, 113, 3, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 114, 2, 0); 
-  __visc__bindIn(var_34, 115, 3, 0); 
-  __visc__bindIn(var_34, 116, 4, 0); 
-  __visc__bindIn(var_34, 117, 5, 0); 
-  __visc__bindIn(var_34, 118, 6, 0); 
-  __visc__bindIn(var_34, 119, 7, 0); 
-  __visc__bindIn(var_34, 120, 8, 0); 
-  __visc__bindIn(var_34, 121, 9, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-  __visc__bindIn(var_36, 122, 2, 0); 
-  __visc__bindIn(var_36, 123, 3, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 124, 2, 0); 
-  __visc__bindIn(var_37, 125, 3, 0); 
-  __visc__bindIn(var_37, 126, 4, 0); 
-  __visc__bindIn(var_37, 127, 5, 0); 
-  __visc__bindIn(var_37, 128, 6, 0); 
-  __visc__bindIn(var_37, 129, 7, 0); 
-  __visc__bindIn(var_37, 130, 8, 0); 
-  __visc__bindIn(var_37, 131, 9, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_38, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 1, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 132, 2, 0); 
-  __visc__bindIn(var_40, 133, 3, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-  __visc__bindIn(var_41, 134, 2, 0); 
-  __visc__bindIn(var_41, 135, 3, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_42, 0, 0, 0); 
-  __visc__bindOut(var_42, 1, 1, 0); 
-
-}
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* batch_normalization_1_gamma; 
-  size_t batch_normalization_1_gamma_bytes; 
-  void* batch_normalization_1_beta; 
-  size_t batch_normalization_1_beta_bytes; 
-  void* batch_normalization_1_mean; 
-  size_t batch_normalization_1_mean_bytes; 
-  void* batch_normalization_1_variance; 
-  size_t batch_normalization_1_variance_bytes; 
-  void* depthwise_conv2d_1_w; 
-  size_t depthwise_conv2d_1_w_bytes; 
-  void* batch_normalization_2_gamma; 
-  size_t batch_normalization_2_gamma_bytes; 
-  void* batch_normalization_2_beta; 
-  size_t batch_normalization_2_beta_bytes; 
-  void* batch_normalization_2_mean; 
-  size_t batch_normalization_2_mean_bytes; 
-  void* batch_normalization_2_variance; 
-  size_t batch_normalization_2_variance_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* batch_normalization_3_gamma; 
-  size_t batch_normalization_3_gamma_bytes; 
-  void* batch_normalization_3_beta; 
-  size_t batch_normalization_3_beta_bytes; 
-  void* batch_normalization_3_mean; 
-  size_t batch_normalization_3_mean_bytes; 
-  void* batch_normalization_3_variance; 
-  size_t batch_normalization_3_variance_bytes; 
-  void* depthwise_conv2d_2_w; 
-  size_t depthwise_conv2d_2_w_bytes; 
-  void* batch_normalization_4_gamma; 
-  size_t batch_normalization_4_gamma_bytes; 
-  void* batch_normalization_4_beta; 
-  size_t batch_normalization_4_beta_bytes; 
-  void* batch_normalization_4_mean; 
-  size_t batch_normalization_4_mean_bytes; 
-  void* batch_normalization_4_variance; 
-  size_t batch_normalization_4_variance_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* batch_normalization_5_gamma; 
-  size_t batch_normalization_5_gamma_bytes; 
-  void* batch_normalization_5_beta; 
-  size_t batch_normalization_5_beta_bytes; 
-  void* batch_normalization_5_mean; 
-  size_t batch_normalization_5_mean_bytes; 
-  void* batch_normalization_5_variance; 
-  size_t batch_normalization_5_variance_bytes; 
-  void* depthwise_conv2d_3_w; 
-  size_t depthwise_conv2d_3_w_bytes; 
-  void* batch_normalization_6_gamma; 
-  size_t batch_normalization_6_gamma_bytes; 
-  void* batch_normalization_6_beta; 
-  size_t batch_normalization_6_beta_bytes; 
-  void* batch_normalization_6_mean; 
-  size_t batch_normalization_6_mean_bytes; 
-  void* batch_normalization_6_variance; 
-  size_t batch_normalization_6_variance_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* batch_normalization_7_gamma; 
-  size_t batch_normalization_7_gamma_bytes; 
-  void* batch_normalization_7_beta; 
-  size_t batch_normalization_7_beta_bytes; 
-  void* batch_normalization_7_mean; 
-  size_t batch_normalization_7_mean_bytes; 
-  void* batch_normalization_7_variance; 
-  size_t batch_normalization_7_variance_bytes; 
-  void* depthwise_conv2d_4_w; 
-  size_t depthwise_conv2d_4_w_bytes; 
-  void* batch_normalization_8_gamma; 
-  size_t batch_normalization_8_gamma_bytes; 
-  void* batch_normalization_8_beta; 
-  size_t batch_normalization_8_beta_bytes; 
-  void* batch_normalization_8_mean; 
-  size_t batch_normalization_8_mean_bytes; 
-  void* batch_normalization_8_variance; 
-  size_t batch_normalization_8_variance_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* batch_normalization_9_gamma; 
-  size_t batch_normalization_9_gamma_bytes; 
-  void* batch_normalization_9_beta; 
-  size_t batch_normalization_9_beta_bytes; 
-  void* batch_normalization_9_mean; 
-  size_t batch_normalization_9_mean_bytes; 
-  void* batch_normalization_9_variance; 
-  size_t batch_normalization_9_variance_bytes; 
-  void* depthwise_conv2d_5_w; 
-  size_t depthwise_conv2d_5_w_bytes; 
-  void* batch_normalization_10_gamma; 
-  size_t batch_normalization_10_gamma_bytes; 
-  void* batch_normalization_10_beta; 
-  size_t batch_normalization_10_beta_bytes; 
-  void* batch_normalization_10_mean; 
-  size_t batch_normalization_10_mean_bytes; 
-  void* batch_normalization_10_variance; 
-  size_t batch_normalization_10_variance_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* batch_normalization_11_gamma; 
-  size_t batch_normalization_11_gamma_bytes; 
-  void* batch_normalization_11_beta; 
-  size_t batch_normalization_11_beta_bytes; 
-  void* batch_normalization_11_mean; 
-  size_t batch_normalization_11_mean_bytes; 
-  void* batch_normalization_11_variance; 
-  size_t batch_normalization_11_variance_bytes; 
-  void* depthwise_conv2d_6_w; 
-  size_t depthwise_conv2d_6_w_bytes; 
-  void* batch_normalization_12_gamma; 
-  size_t batch_normalization_12_gamma_bytes; 
-  void* batch_normalization_12_beta; 
-  size_t batch_normalization_12_beta_bytes; 
-  void* batch_normalization_12_mean; 
-  size_t batch_normalization_12_mean_bytes; 
-  void* batch_normalization_12_variance; 
-  size_t batch_normalization_12_variance_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* batch_normalization_13_gamma; 
-  size_t batch_normalization_13_gamma_bytes; 
-  void* batch_normalization_13_beta; 
-  size_t batch_normalization_13_beta_bytes; 
-  void* batch_normalization_13_mean; 
-  size_t batch_normalization_13_mean_bytes; 
-  void* batch_normalization_13_variance; 
-  size_t batch_normalization_13_variance_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-} RootIn;
-
-int main(){ 
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->batch_normalization_1_gamma = batch_normalization_1_gamma; 
-args->batch_normalization_1_gamma_bytes = 0; 
-args->batch_normalization_1_beta = batch_normalization_1_beta; 
-args->batch_normalization_1_beta_bytes = 0; 
-args->batch_normalization_1_mean = batch_normalization_1_mean; 
-args->batch_normalization_1_mean_bytes = 0; 
-args->batch_normalization_1_variance = batch_normalization_1_variance; 
-args->batch_normalization_1_variance_bytes = 0; 
-args->depthwise_conv2d_1_w = depthwise_conv2d_1_w; 
-args->depthwise_conv2d_1_w_bytes = 0; 
-args->batch_normalization_2_gamma = batch_normalization_2_gamma; 
-args->batch_normalization_2_gamma_bytes = 0; 
-args->batch_normalization_2_beta = batch_normalization_2_beta; 
-args->batch_normalization_2_beta_bytes = 0; 
-args->batch_normalization_2_mean = batch_normalization_2_mean; 
-args->batch_normalization_2_mean_bytes = 0; 
-args->batch_normalization_2_variance = batch_normalization_2_variance; 
-args->batch_normalization_2_variance_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->batch_normalization_3_gamma = batch_normalization_3_gamma; 
-args->batch_normalization_3_gamma_bytes = 0; 
-args->batch_normalization_3_beta = batch_normalization_3_beta; 
-args->batch_normalization_3_beta_bytes = 0; 
-args->batch_normalization_3_mean = batch_normalization_3_mean; 
-args->batch_normalization_3_mean_bytes = 0; 
-args->batch_normalization_3_variance = batch_normalization_3_variance; 
-args->batch_normalization_3_variance_bytes = 0; 
-args->depthwise_conv2d_2_w = depthwise_conv2d_2_w; 
-args->depthwise_conv2d_2_w_bytes = 0; 
-args->batch_normalization_4_gamma = batch_normalization_4_gamma; 
-args->batch_normalization_4_gamma_bytes = 0; 
-args->batch_normalization_4_beta = batch_normalization_4_beta; 
-args->batch_normalization_4_beta_bytes = 0; 
-args->batch_normalization_4_mean = batch_normalization_4_mean; 
-args->batch_normalization_4_mean_bytes = 0; 
-args->batch_normalization_4_variance = batch_normalization_4_variance; 
-args->batch_normalization_4_variance_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->batch_normalization_5_gamma = batch_normalization_5_gamma; 
-args->batch_normalization_5_gamma_bytes = 0; 
-args->batch_normalization_5_beta = batch_normalization_5_beta; 
-args->batch_normalization_5_beta_bytes = 0; 
-args->batch_normalization_5_mean = batch_normalization_5_mean; 
-args->batch_normalization_5_mean_bytes = 0; 
-args->batch_normalization_5_variance = batch_normalization_5_variance; 
-args->batch_normalization_5_variance_bytes = 0; 
-args->depthwise_conv2d_3_w = depthwise_conv2d_3_w; 
-args->depthwise_conv2d_3_w_bytes = 0; 
-args->batch_normalization_6_gamma = batch_normalization_6_gamma; 
-args->batch_normalization_6_gamma_bytes = 0; 
-args->batch_normalization_6_beta = batch_normalization_6_beta; 
-args->batch_normalization_6_beta_bytes = 0; 
-args->batch_normalization_6_mean = batch_normalization_6_mean; 
-args->batch_normalization_6_mean_bytes = 0; 
-args->batch_normalization_6_variance = batch_normalization_6_variance; 
-args->batch_normalization_6_variance_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->batch_normalization_7_gamma = batch_normalization_7_gamma; 
-args->batch_normalization_7_gamma_bytes = 0; 
-args->batch_normalization_7_beta = batch_normalization_7_beta; 
-args->batch_normalization_7_beta_bytes = 0; 
-args->batch_normalization_7_mean = batch_normalization_7_mean; 
-args->batch_normalization_7_mean_bytes = 0; 
-args->batch_normalization_7_variance = batch_normalization_7_variance; 
-args->batch_normalization_7_variance_bytes = 0; 
-args->depthwise_conv2d_4_w = depthwise_conv2d_4_w; 
-args->depthwise_conv2d_4_w_bytes = 0; 
-args->batch_normalization_8_gamma = batch_normalization_8_gamma; 
-args->batch_normalization_8_gamma_bytes = 0; 
-args->batch_normalization_8_beta = batch_normalization_8_beta; 
-args->batch_normalization_8_beta_bytes = 0; 
-args->batch_normalization_8_mean = batch_normalization_8_mean; 
-args->batch_normalization_8_mean_bytes = 0; 
-args->batch_normalization_8_variance = batch_normalization_8_variance; 
-args->batch_normalization_8_variance_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->batch_normalization_9_gamma = batch_normalization_9_gamma; 
-args->batch_normalization_9_gamma_bytes = 0; 
-args->batch_normalization_9_beta = batch_normalization_9_beta; 
-args->batch_normalization_9_beta_bytes = 0; 
-args->batch_normalization_9_mean = batch_normalization_9_mean; 
-args->batch_normalization_9_mean_bytes = 0; 
-args->batch_normalization_9_variance = batch_normalization_9_variance; 
-args->batch_normalization_9_variance_bytes = 0; 
-args->depthwise_conv2d_5_w = depthwise_conv2d_5_w; 
-args->depthwise_conv2d_5_w_bytes = 0; 
-args->batch_normalization_10_gamma = batch_normalization_10_gamma; 
-args->batch_normalization_10_gamma_bytes = 0; 
-args->batch_normalization_10_beta = batch_normalization_10_beta; 
-args->batch_normalization_10_beta_bytes = 0; 
-args->batch_normalization_10_mean = batch_normalization_10_mean; 
-args->batch_normalization_10_mean_bytes = 0; 
-args->batch_normalization_10_variance = batch_normalization_10_variance; 
-args->batch_normalization_10_variance_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->batch_normalization_11_gamma = batch_normalization_11_gamma; 
-args->batch_normalization_11_gamma_bytes = 0; 
-args->batch_normalization_11_beta = batch_normalization_11_beta; 
-args->batch_normalization_11_beta_bytes = 0; 
-args->batch_normalization_11_mean = batch_normalization_11_mean; 
-args->batch_normalization_11_mean_bytes = 0; 
-args->batch_normalization_11_variance = batch_normalization_11_variance; 
-args->batch_normalization_11_variance_bytes = 0; 
-args->depthwise_conv2d_6_w = depthwise_conv2d_6_w; 
-args->depthwise_conv2d_6_w_bytes = 0; 
-args->batch_normalization_12_gamma = batch_normalization_12_gamma; 
-args->batch_normalization_12_gamma_bytes = 0; 
-args->batch_normalization_12_beta = batch_normalization_12_beta; 
-args->batch_normalization_12_beta_bytes = 0; 
-args->batch_normalization_12_mean = batch_normalization_12_mean; 
-args->batch_normalization_12_mean_bytes = 0; 
-args->batch_normalization_12_variance = batch_normalization_12_variance; 
-args->batch_normalization_12_variance_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->batch_normalization_13_gamma = batch_normalization_13_gamma; 
-args->batch_normalization_13_gamma_bytes = 0; 
-args->batch_normalization_13_beta = batch_normalization_13_beta; 
-args->batch_normalization_13_beta_bytes = 0; 
-args->batch_normalization_13_mean = batch_normalization_13_mean; 
-args->batch_normalization_13_mean_bytes = 0; 
-args->batch_normalization_13_variance = batch_normalization_13_variance; 
-args->batch_normalization_13_variance_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
-computeAccuracy2(labels, 10000, result);
-return 0; 
-
-} 
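The host code above relies on runtime helpers such as readTrainedWeights() and computeAccuracy2(), whose definitions live in the HPVM tensor runtime rather than in this file. As a rough standalone sketch of what the loader does, assuming each .bin file stores raw headerless float32 values (as the model_params layout suggests), and with loadRawTensor being a hypothetical name rather than the runtime's implementation:

    #include <cstdio>
    #include <cstdlib>
    #include <vector>

    // Minimal sketch of a raw float32 tensor loader in the spirit of
    // readTrainedWeights(). Assumption: the file holds exactly n*c*h*w
    // native-endian floats and nothing else. Not the HPVM implementation.
    static std::vector<float> loadRawTensor(const char* path,
                                            size_t n, size_t c,
                                            size_t h, size_t w) {
      const size_t count = n * c * h * w;
      std::vector<float> data(count);
      FILE* f = std::fopen(path, "rb");
      if (!f) { std::perror(path); std::exit(1); }
      if (std::fread(data.data(), sizeof(float), count, f) != count) {
        std::fprintf(stderr, "%s: short read\n", path);
        std::exit(1);
      }
      std::fclose(f);
      return data;
    }

    // Example mirroring the conv2d_1_w load above (a 32x3x3x3 filter):
    //   std::vector<float> w = loadRawTensor("conv2d_1_w.bin", 32, 3, 3, 3);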
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_beta.bin
deleted file mode 100644
index 5d9a0d95865637cfb783fb9a56d3ff2ecb57e868..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_gamma.bin
deleted file mode 100644
index 71147ba51b53f9b5f8ed84d3e12b3f60d04e88f0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_mean.bin
deleted file mode 100644
index f75ef27a6bde8cf45607b0e7957603ad5c767928..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_variance.bin
deleted file mode 100644
index cdbb02d6dcc67a983c949224e5ef2356cbed70ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_10_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_beta.bin
deleted file mode 100644
index a6d770acd50df688be127899d5ebc76a6b660108..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_gamma.bin
deleted file mode 100644
index 7d2add83b878940a6e83ff33ac8328b08218b036..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_mean.bin
deleted file mode 100644
index 481fa2d212a171377d79b38765b42481939abd0f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_variance.bin
deleted file mode 100644
index 99b00e0a82730dbf49cc6112379b6106b3538f24..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_beta.bin
deleted file mode 100644
index e2fa099a1b5df7840c7b5b2c8b9ec83bad07f238..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_gamma.bin
deleted file mode 100644
index 2c6d46a8c35a83ea5929e7b0b06980baf1ea8b08..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_mean.bin
deleted file mode 100644
index 4c46529e2774bb4fed9337394213ddfd6fa3b7a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_variance.bin
deleted file mode 100644
index 8afde358ed8dffed9eca531e3ced41953036c926..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_beta.bin
deleted file mode 100644
index 5192e8414e7349eb49f139c31d688349dfcaa915..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_gamma.bin
deleted file mode 100644
index c7ba0b707e96c024bbdcf825a28f78522685b7e2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_mean.bin
deleted file mode 100644
index 41c23352862bc90c6cb298fbda821712c919673b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_variance.bin
deleted file mode 100644
index 67aa92699f5da3e6384e2502fce4cf985d207e2c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_beta.bin
deleted file mode 100644
index 05d61c8e00f196b83dde7de794cc9feff2929582..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_gamma.bin
deleted file mode 100644
index 1aaaeaa110d8b9eb8108a1546302f8d5c1c12c35..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_mean.bin
deleted file mode 100644
index ffcd3adfeac9b601872fa59caa42601fdc10494c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_variance.bin
deleted file mode 100644
index f29dc5a9db7e4fe9783917749bd151ce80e40702..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_1_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_beta.bin
deleted file mode 100644
index ba12532332cec1d6ee20d16d04be81575a8f0802..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_gamma.bin
deleted file mode 100644
index bf0dd075d19280dfcda711fd95eeab6fb429b8f5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_mean.bin
deleted file mode 100644
index faec424f63fab99e4ba00101a005c1b84cb2f8f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_variance.bin
deleted file mode 100644
index 80125312bc29cc27bcb9a51db2d206c70b19c25d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_beta.bin
deleted file mode 100644
index 0a7e5127f93e4f3e77893c02209dd34f92fcde00..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_gamma.bin
deleted file mode 100644
index ab4be7e7af315799ddc2f371e09442d81c81ec9e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_mean.bin
deleted file mode 100644
index 5c4cccbc2d7756430aba85f100d164425b7b7559..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_variance.bin
deleted file mode 100644
index 88e0320d9764ac47a0ffeccd912430db4e3a70ad..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_3_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_beta.bin
deleted file mode 100644
index 78b0f312269445116d4b9e05d3f2f85730509d46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_gamma.bin
deleted file mode 100644
index cc9ac2a0fcc9dc57b61c54d13f9cdaba8bf045c9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_mean.bin
deleted file mode 100644
index e184ea4954ffe0e8070fd467bc90093c142ee754..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_variance.bin
deleted file mode 100644
index dd6c0672454934523c04c2e124bb64d024c2207f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_beta.bin
deleted file mode 100644
index d111c363bdab8b36db98fcefcd2eb61e080eadd4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_gamma.bin
deleted file mode 100644
index aae71935a9ec2124e203c921e2d2ca570f3aa2a8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_mean.bin
deleted file mode 100644
index b4675bad00eddb39999c5411eb225f9d13a22fc4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_variance.bin
deleted file mode 100644
index f8126c266f398a9013241ee5d97fe42beaa5bb37..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_beta.bin
deleted file mode 100644
index c18a950b0d0acca31f82e84135c392e348868011..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_gamma.bin
deleted file mode 100644
index 92bc587a86c98aadc5549f3da65b4f74e812b2fb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_mean.bin
deleted file mode 100644
index c888f2c909ac6d95871fe944b6b4f51242d4eb8a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_variance.bin
deleted file mode 100644
index a5a799857b7cc50a9aa8208aab08e7270dccca5b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_beta.bin
deleted file mode 100644
index ab02be5f352315724b5ca3b59e33ff085f46207d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_gamma.bin
deleted file mode 100644
index 72c58ae29db08ac94c3b9b778ea015405cb9d3f6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_mean.bin
deleted file mode 100644
index 7f0e01a07c23faa2101cbf299ea9d35fe3d5e3ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_variance.bin
deleted file mode 100644
index 094474aca2ad49d1400c71d9acbfcd1631c7be18..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_beta.bin
deleted file mode 100644
index 5f92c58a7c47c207a98a77f6961410d08e8446f0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_gamma.bin
deleted file mode 100644
index 6ab36ce54740e9e4a4e4948684e0e4fbbd71b1cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_mean.bin
deleted file mode 100644
index 5d093813a0926dd3c1c67b6f50e092465101fde9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_variance.bin
deleted file mode 100644
index edf6463b0ca999595327a9dc300242a9e58c1fb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_beta.bin
deleted file mode 100644
index ad3f1dc8965ba641749d65a5d0c5b32ab40c5dd4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_gamma.bin
deleted file mode 100644
index ec2b90646b3c7f21565e4972638e746e71a2b5bb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_mean.bin
deleted file mode 100644
index 47b2393cf22e01162577be3e361a1a40caec6bb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_variance.bin
deleted file mode 100644
index fb0c96059789a653f0d064e2c4743287b213d90d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_1_w.bin
deleted file mode 100644
index 3e10934df8c5194e89ced8a8c6dfc0c496d63659..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_2_w.bin
deleted file mode 100644
index b156a80dbbad1956afde6c953b760fe3147f86dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_3_w.bin
deleted file mode 100644
index 39ccf4d05b623c02ad5c86aa537804df697b2eca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_4_w.bin
deleted file mode 100644
index 19fa2c8035b9439be46392feee277b1e2c796994..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_5_w.bin
deleted file mode 100644
index 79d3b1efe6c1d18ce86fea69602f161425c76421..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_6_w.bin
deleted file mode 100644
index fc7d758888153e7a52ebb59e8db7822d5ca58283..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_7_w.bin
deleted file mode 100644
index d569ea19a45477b991af7bce4aa14289bb3858a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_b.bin
deleted file mode 100644
index dde75645d79ba2039e975a4cb2892f2cdca58038..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_w.bin
deleted file mode 100644
index e053b5d9d9ca19466225106fd9ad109d55e32cdb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_1_w.bin
deleted file mode 100644
index b0948ad7c455ab26b7a500823da78dd2ebdf5a2f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_2_w.bin
deleted file mode 100644
index 673879938fec8d6cea506ceba413479fe5305a72..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_3_w.bin
deleted file mode 100644
index 19e9c200ad108dcafbdac74c614b3fe637a76e0b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_4_w.bin
deleted file mode 100644
index 036b5573250744da275f27bca679c5eea90f8d67..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_5_w.bin
deleted file mode 100644
index 870049e69e3783cf45939876c6b8717033d6cce7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_6_w.bin
deleted file mode 100644
index f23ffe4c99eaac8f9f6d96d48f7312e25347f86f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/input.bin
deleted file mode 100644
index 793e873758141ad74020bcb21cfe55fda29be851..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels.bin
deleted file mode 100644
index af228a267c6c651a76b7d719f8d44202ed4c0eae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels32.bin
deleted file mode 100644
index 24b800b7002207fa05a7976e08c05cf5f72aa91d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layer_composition.txt
deleted file mode 100644
index 9b8b3f7e11a428a28fecbde2c204bf39b7e02703..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layer_composition.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layers.txt
deleted file mode 100644
index a9415755180a7ebdceb89b7e3e6d6cee258b18c4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/layers.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorBatchNorm1
-#tensorRelu1
-#tensorDepthwiseConv1
-#tensorBatchNorm2
-#tensorRelu2
-Conv2,10000,32,32,32,64,32,1,1
-#tensorBatchNorm3
-#tensorRelu3
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu4
-Conv3,10000,64,16,16,128,64,1,1
-#tensorBatchNorm5
-#tensorRelu5
-#tensorDepthwiseConv3
-#tensorBatchNorm6
-#tensorRelu6
-Conv4,10000,128,16,16,128,128,1,1
-#tensorBatchNorm7
-#tensorRelu7
-#tensorDepthwiseConv4
-#tensorBatchNorm8
-#tensorRelu8
-Conv5,10000,128,8,8,256,128,1,1
-#tensorBatchNorm9
-#tensorRelu9
-#tensorDepthwiseConv5
-#tensorBatchNorm10
-#tensorRelu10
-Conv6,10000,256,8,8,256,256,1,1
-#tensorBatchNorm11
-#tensorRelu11
-#tensorDepthwiseConv6
-#tensorBatchNorm12
-#tensorRelu12
-Conv7,10000,256,4,4,512,256,1,1
-#tensorBatchNorm13
-#tensorRelu13
-#tensorPooling1
-FC1,10000,2048,2048,10
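(Aside on the deleted layers.txt: each non-"#" line appears to encode one layer's shape. Cross-referencing the readTrainedWeights() calls in the sources below suggests the Conv fields are name, input count, input channels, input height, input width, output channels, input channels again, kernel height, kernel width, and the FC fields are name, input count, flattened input size twice, output size. This reading is inferred, not documented in the repository. A minimal parsing sketch under that assumption, with a hypothetical helper name:)

#include <sstream>
#include <string>
#include <utility>
#include <vector>

// Parses one uncommented layers.txt line, e.g. "Conv1,10000,3,32,32,32,3,3,3",
// into its name and integer fields. The meaning of each field is inferred from
// the matching readTrainedWeights() calls in promise_src.cc / src.cc.
static std::pair<std::string, std::vector<int>> parseLayerLine(const std::string& line) {
  std::stringstream ss(line);
  std::string name;
  std::getline(ss, name, ',');          // layer name up to the first comma
  std::vector<int> dims;
  for (std::string tok; std::getline(ss, tok, ','); )
    dims.push_back(std::stoi(tok));     // remaining comma-separated integers
  return {name, dims};
}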
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/promise_src.cc
deleted file mode 100644
index c5fd3606da51281bc2c583e98f024bd2f54f837b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/promise_src.cc
+++ /dev/null
@@ -1,238 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -1.5164621164798737, 1.6472081774473288, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -9.868980642318725, 10.560956018447879, 9); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_5 = tensorRelu(var_4); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 6.821381127357554, conv2d_2_w, -1.1834390873908995, 1.2731596627235617, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.875998497009277, 7.51305247974393, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.826067455768602, conv2d_3_w, -0.599876856982708, 0.6812073457241064, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.633289833068848, 5.177892235755925, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.02646304416659, conv2d_4_w, -0.4555967862010002, 0.4942613914608956, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.316803941726685, 4.605850250244146, 9); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 4.532649063110355, conv2d_5_w, -0.35657615590095515, 0.3382165088057521, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.1012511816024775, 4.3630500688553, 9); 
-void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_29 = tensorRelu(var_28); 
-void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 3.9874704387188977, conv2d_6_w, -0.28502783328294756, 0.28604640334844594, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.243851703643799, 3.486250406742097, 9); 
-void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_32 = tensorRelu(var_31); 
-void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 6.563065901756522, conv2d_7_w, -0.18946402323246003, 0.19012390717864017, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.938115713119507, 3.538363476753238, 9); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-void* var_40 = FCLayer_PROMISE(var_39, 0.0, 1.8908388000727185, dense_1_w, -0.35140394401550296, 0.422872786462307, dense_1_b, -0.23878151, 0.26507422, -1, -14.630816223144532, 27.27252123260504, 9); 
-void* var_41 = tensorSoftmax(var_40); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
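(The deleted promise_src.cc above repeats the whole evaluation 100 times, re-reading weights each run and averaging per-batch accuracy. Stripped of the network body, the driver skeleton shared by these generated sources reduces to the following sketch, built only from the runtime calls that appear above; include paths are assumed, and runNetwork is a hypothetical stand-in for the long conv/batchnorm/relu call chain:)

#include <cstdint>
#include "tensor_runtime.h"   // assumed include path for the calls below
#include "utils.h"

// Hypothetical placeholder; a real driver inlines the full tensor call chain here.
static void* runNetwork(void* input) { return input; }

int main() {
  llvm_hpvm_initTensorRt(0);            // select GPU 0, as in the deleted sources
  startMemTracking();                   // enable per-batch memory tracking

  int test_input_size = 10000;
  int batch_size = 10000;
  int batch_count = test_input_size / batch_size;
  float final_accuracy = 0.0;

  for (int i = 0; i < batch_count; i++) {
    int start = i * batch_size;
    int end = (i + 1) * batch_size;
    void* input = readInputBatch("data/mobilenet_shallow_nathan/input.bin",
                                 0, start, end, 3, 32, 32);
    void* output = runNetwork(input);
    uint8_t* labels = readLabelsBatch("data/mobilenet_shallow_nathan/labels.bin",
                                      start, end);
    final_accuracy += computeAccuracy2(labels, batch_size, output);
    freeBatchMemory();                  // presumably frees tensors tracked since startMemTracking()
  }

  dumpFinalAccuracy(final_accuracy / batch_count);
  llvm_hpvm_cleanupTensorRt();
  return 0;
}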
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/src.cc
deleted file mode 100644
index 6599f7d0ea0be6a76c4154d25b3a7be2c6724115..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow/src.cc
+++ /dev/null
@@ -1,231 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_9 = tensorRelu(var_8); 
-void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_13 = tensorRelu(var_12); 
-void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_16 = tensorRelu(var_15); 
-void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_28 = tensorRelu(var_27); 
-void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_43 = tensorRelu(var_42); 
-void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_46 = tensorRelu(var_45); 
-void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-void* var_50 = tensorAdd(var_49, dense_1_b); 
-void* var_51 = tensorSoftmax(var_50); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
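(Comparing the two deleted drivers: src.cc runs the exact FP32 pipeline via tensorConvolution / tensorGemmGPU + tensorAdd, whereas promise_src.cc fuses each conv and FC layer into a ConvLayer_PROMISE / FCLayer_PROMISE call that additionally carries [min, max] value ranges for input, weights, bias, and output, plus a trailing integer knob (9 in every call above) selecting the approximation level. A side-by-side of the first convolution, copied from the sources above; the positional-argument comments are an inferred reading of the runtime's signature, not documentation:)

// exact (src.cc): pad 1,1, stride 1,1, mode 1, groups 1
// (the last argument is the group count, per the depthwise calls above)
void* v0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1);

// approximated (promise_src.cc): same conv plus quantization metadata
void* p0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797,            // input range
                             conv2d_1_w,
                             -1.5164621164798737, 1.6472081774473288, // weight range
                             NULL, 0, 0,                              // no bias
                             1, 1, 1, 1,                              // pad h/w, stride h/w
                             -1, 0, -1,                               // pool id/size, activation id (-1 = none; inferred)
                             -9.868980642318725, 10.560956018447879,  // output range
                             9);                                      // approximation knob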
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/approxhpvm_src.cc
deleted file mode 100644
index dc0c873c63333299981591cb5654cb38be9d4092..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/approxhpvm_src.cc
+++ /dev/null
@@ -1,1224 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 32); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 64); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 128); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 256); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_avg(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
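-
-// A note on the generated node pattern above (a sketch of the assumed intrinsic
-// semantics, not an authoritative spec):
-//   __visc__hint(visc::CUDNN_TARGET)   suggests a backend (cuDNN) for this node;
-//   __visc__attributes(n, ..., 0)      declares n pointer arguments the node
-//                                      reads; the trailing 0 means none of them
-//                                      are written in place;
-//   __visc__return(2, r, (size_t) 0)   returns two values: the result tensor
-//                                      and a placeholder byte count.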
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* batch_normalization_1_gamma, size_t batch_normalization_1_gamma_bytes, 
-	  void* batch_normalization_1_beta, size_t batch_normalization_1_beta_bytes, 
-	  void* batch_normalization_1_mean, size_t batch_normalization_1_mean_bytes, 
-	  void* batch_normalization_1_variance, size_t batch_normalization_1_variance_bytes, 
-	  void* depthwise_conv2d_1_w, size_t depthwise_conv2d_1_w_bytes, 
-	  void* batch_normalization_2_gamma, size_t batch_normalization_2_gamma_bytes, 
-	  void* batch_normalization_2_beta, size_t batch_normalization_2_beta_bytes, 
-	  void* batch_normalization_2_mean, size_t batch_normalization_2_mean_bytes, 
-	  void* batch_normalization_2_variance, size_t batch_normalization_2_variance_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* batch_normalization_3_gamma, size_t batch_normalization_3_gamma_bytes, 
-	  void* batch_normalization_3_beta, size_t batch_normalization_3_beta_bytes, 
-	  void* batch_normalization_3_mean, size_t batch_normalization_3_mean_bytes, 
-	  void* batch_normalization_3_variance, size_t batch_normalization_3_variance_bytes, 
-	  void* depthwise_conv2d_2_w, size_t depthwise_conv2d_2_w_bytes, 
-	  void* batch_normalization_4_gamma, size_t batch_normalization_4_gamma_bytes, 
-	  void* batch_normalization_4_beta, size_t batch_normalization_4_beta_bytes, 
-	  void* batch_normalization_4_mean, size_t batch_normalization_4_mean_bytes, 
-	  void* batch_normalization_4_variance, size_t batch_normalization_4_variance_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* batch_normalization_5_gamma, size_t batch_normalization_5_gamma_bytes, 
-	  void* batch_normalization_5_beta, size_t batch_normalization_5_beta_bytes, 
-	  void* batch_normalization_5_mean, size_t batch_normalization_5_mean_bytes, 
-	  void* batch_normalization_5_variance, size_t batch_normalization_5_variance_bytes, 
-	  void* depthwise_conv2d_3_w, size_t depthwise_conv2d_3_w_bytes, 
-	  void* batch_normalization_6_gamma, size_t batch_normalization_6_gamma_bytes, 
-	  void* batch_normalization_6_beta, size_t batch_normalization_6_beta_bytes, 
-	  void* batch_normalization_6_mean, size_t batch_normalization_6_mean_bytes, 
-	  void* batch_normalization_6_variance, size_t batch_normalization_6_variance_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* batch_normalization_7_gamma, size_t batch_normalization_7_gamma_bytes, 
-	  void* batch_normalization_7_beta, size_t batch_normalization_7_beta_bytes, 
-	  void* batch_normalization_7_mean, size_t batch_normalization_7_mean_bytes, 
-	  void* batch_normalization_7_variance, size_t batch_normalization_7_variance_bytes, 
-	  void* depthwise_conv2d_4_w, size_t depthwise_conv2d_4_w_bytes, 
-	  void* batch_normalization_8_gamma, size_t batch_normalization_8_gamma_bytes, 
-	  void* batch_normalization_8_beta, size_t batch_normalization_8_beta_bytes, 
-	  void* batch_normalization_8_mean, size_t batch_normalization_8_mean_bytes, 
-	  void* batch_normalization_8_variance, size_t batch_normalization_8_variance_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* batch_normalization_9_gamma, size_t batch_normalization_9_gamma_bytes, 
-	  void* batch_normalization_9_beta, size_t batch_normalization_9_beta_bytes, 
-	  void* batch_normalization_9_mean, size_t batch_normalization_9_mean_bytes, 
-	  void* batch_normalization_9_variance, size_t batch_normalization_9_variance_bytes, 
-	  void* depthwise_conv2d_5_w, size_t depthwise_conv2d_5_w_bytes, 
-	  void* batch_normalization_10_gamma, size_t batch_normalization_10_gamma_bytes, 
-	  void* batch_normalization_10_beta, size_t batch_normalization_10_beta_bytes, 
-	  void* batch_normalization_10_mean, size_t batch_normalization_10_mean_bytes, 
-	  void* batch_normalization_10_variance, size_t batch_normalization_10_variance_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* batch_normalization_11_gamma, size_t batch_normalization_11_gamma_bytes, 
-	  void* batch_normalization_11_beta, size_t batch_normalization_11_beta_bytes, 
-	  void* batch_normalization_11_mean, size_t batch_normalization_11_mean_bytes, 
-	  void* batch_normalization_11_variance, size_t batch_normalization_11_variance_bytes, 
-	  void* depthwise_conv2d_6_w, size_t depthwise_conv2d_6_w_bytes, 
-	  void* batch_normalization_12_gamma, size_t batch_normalization_12_gamma_bytes, 
-	  void* batch_normalization_12_beta, size_t batch_normalization_12_beta_bytes, 
-	  void* batch_normalization_12_mean, size_t batch_normalization_12_mean_bytes, 
-	  void* batch_normalization_12_variance, size_t batch_normalization_12_variance_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* batch_normalization_13_gamma, size_t batch_normalization_13_gamma_bytes, 
-	  void* batch_normalization_13_beta, size_t batch_normalization_13_beta_bytes, 
-	  void* batch_normalization_13_mean, size_t batch_normalization_13_mean_bytes, 
-	  void* batch_normalization_13_variance, size_t batch_normalization_13_variance_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(68, input, conv2d_1_w, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, depthwise_conv2d_1_w, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, conv2d_2_w, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, depthwise_conv2d_2_w, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, conv2d_3_w, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, depthwise_conv2d_3_w, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, conv2d_4_w, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, depthwise_conv2d_4_w, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, conv2d_5_w, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, depthwise_conv2d_5_w, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, conv2d_6_w, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, depthwise_conv2d_6_w, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, conv2d_7_w, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-  __visc__bindIn(var_1, 6, 4, 0); 
-  __visc__bindIn(var_1, 7, 5, 0); 
-  __visc__bindIn(var_1, 8, 6, 0); 
-  __visc__bindIn(var_1, 9, 7, 0); 
-  __visc__bindIn(var_1, 10, 8, 0); 
-  __visc__bindIn(var_1, 11, 9, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 12, 2, 0); 
-  __visc__bindIn(var_3, 13, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 14, 2, 0); 
-  __visc__bindIn(var_4, 15, 3, 0); 
-  __visc__bindIn(var_4, 16, 4, 0); 
-  __visc__bindIn(var_4, 17, 5, 0); 
-  __visc__bindIn(var_4, 18, 6, 0); 
-  __visc__bindIn(var_4, 19, 7, 0); 
-  __visc__bindIn(var_4, 20, 8, 0); 
-  __visc__bindIn(var_4, 21, 9, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-  __visc__bindIn(var_6, 22, 2, 0); 
-  __visc__bindIn(var_6, 23, 3, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 24, 2, 0); 
-  __visc__bindIn(var_7, 25, 3, 0); 
-  __visc__bindIn(var_7, 26, 4, 0); 
-  __visc__bindIn(var_7, 27, 5, 0); 
-  __visc__bindIn(var_7, 28, 6, 0); 
-  __visc__bindIn(var_7, 29, 7, 0); 
-  __visc__bindIn(var_7, 30, 8, 0); 
-  __visc__bindIn(var_7, 31, 9, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 32, 2, 0); 
-  __visc__bindIn(var_9, 33, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 34, 2, 0); 
-  __visc__bindIn(var_10, 35, 3, 0); 
-  __visc__bindIn(var_10, 36, 4, 0); 
-  __visc__bindIn(var_10, 37, 5, 0); 
-  __visc__bindIn(var_10, 38, 6, 0); 
-  __visc__bindIn(var_10, 39, 7, 0); 
-  __visc__bindIn(var_10, 40, 8, 0); 
-  __visc__bindIn(var_10, 41, 9, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 42, 2, 0); 
-  __visc__bindIn(var_12, 43, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-  __visc__bindIn(var_13, 44, 2, 0); 
-  __visc__bindIn(var_13, 45, 3, 0); 
-  __visc__bindIn(var_13, 46, 4, 0); 
-  __visc__bindIn(var_13, 47, 5, 0); 
-  __visc__bindIn(var_13, 48, 6, 0); 
-  __visc__bindIn(var_13, 49, 7, 0); 
-  __visc__bindIn(var_13, 50, 8, 0); 
-  __visc__bindIn(var_13, 51, 9, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 52, 2, 0); 
-  __visc__bindIn(var_15, 53, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-  __visc__bindIn(var_16, 54, 2, 0); 
-  __visc__bindIn(var_16, 55, 3, 0); 
-  __visc__bindIn(var_16, 56, 4, 0); 
-  __visc__bindIn(var_16, 57, 5, 0); 
-  __visc__bindIn(var_16, 58, 6, 0); 
-  __visc__bindIn(var_16, 59, 7, 0); 
-  __visc__bindIn(var_16, 60, 8, 0); 
-  __visc__bindIn(var_16, 61, 9, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 62, 2, 0); 
-  __visc__bindIn(var_18, 63, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-  __visc__bindIn(var_19, 64, 2, 0); 
-  __visc__bindIn(var_19, 65, 3, 0); 
-  __visc__bindIn(var_19, 66, 4, 0); 
-  __visc__bindIn(var_19, 67, 5, 0); 
-  __visc__bindIn(var_19, 68, 6, 0); 
-  __visc__bindIn(var_19, 69, 7, 0); 
-  __visc__bindIn(var_19, 70, 8, 0); 
-  __visc__bindIn(var_19, 71, 9, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 72, 2, 0); 
-  __visc__bindIn(var_21, 73, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-  __visc__bindIn(var_22, 74, 2, 0); 
-  __visc__bindIn(var_22, 75, 3, 0); 
-  __visc__bindIn(var_22, 76, 4, 0); 
-  __visc__bindIn(var_22, 77, 5, 0); 
-  __visc__bindIn(var_22, 78, 6, 0); 
-  __visc__bindIn(var_22, 79, 7, 0); 
-  __visc__bindIn(var_22, 80, 8, 0); 
-  __visc__bindIn(var_22, 81, 9, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 82, 2, 0); 
-  __visc__bindIn(var_24, 83, 3, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-  __visc__bindIn(var_25, 84, 2, 0); 
-  __visc__bindIn(var_25, 85, 3, 0); 
-  __visc__bindIn(var_25, 86, 4, 0); 
-  __visc__bindIn(var_25, 87, 5, 0); 
-  __visc__bindIn(var_25, 88, 6, 0); 
-  __visc__bindIn(var_25, 89, 7, 0); 
-  __visc__bindIn(var_25, 90, 8, 0); 
-  __visc__bindIn(var_25, 91, 9, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 92, 2, 0); 
-  __visc__bindIn(var_27, 93, 3, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-  __visc__bindIn(var_28, 94, 2, 0); 
-  __visc__bindIn(var_28, 95, 3, 0); 
-  __visc__bindIn(var_28, 96, 4, 0); 
-  __visc__bindIn(var_28, 97, 5, 0); 
-  __visc__bindIn(var_28, 98, 6, 0); 
-  __visc__bindIn(var_28, 99, 7, 0); 
-  __visc__bindIn(var_28, 100, 8, 0); 
-  __visc__bindIn(var_28, 101, 9, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 102, 2, 0); 
-  __visc__bindIn(var_30, 103, 3, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 104, 2, 0); 
-  __visc__bindIn(var_31, 105, 3, 0); 
-  __visc__bindIn(var_31, 106, 4, 0); 
-  __visc__bindIn(var_31, 107, 5, 0); 
-  __visc__bindIn(var_31, 108, 6, 0); 
-  __visc__bindIn(var_31, 109, 7, 0); 
-  __visc__bindIn(var_31, 110, 8, 0); 
-  __visc__bindIn(var_31, 111, 9, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-  __visc__bindIn(var_33, 112, 2, 0); 
-  __visc__bindIn(var_33, 113, 3, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 114, 2, 0); 
-  __visc__bindIn(var_34, 115, 3, 0); 
-  __visc__bindIn(var_34, 116, 4, 0); 
-  __visc__bindIn(var_34, 117, 5, 0); 
-  __visc__bindIn(var_34, 118, 6, 0); 
-  __visc__bindIn(var_34, 119, 7, 0); 
-  __visc__bindIn(var_34, 120, 8, 0); 
-  __visc__bindIn(var_34, 121, 9, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-  __visc__bindIn(var_36, 122, 2, 0); 
-  __visc__bindIn(var_36, 123, 3, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 124, 2, 0); 
-  __visc__bindIn(var_37, 125, 3, 0); 
-  __visc__bindIn(var_37, 126, 4, 0); 
-  __visc__bindIn(var_37, 127, 5, 0); 
-  __visc__bindIn(var_37, 128, 6, 0); 
-  __visc__bindIn(var_37, 129, 7, 0); 
-  __visc__bindIn(var_37, 130, 8, 0); 
-  __visc__bindIn(var_37, 131, 9, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_38, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 1, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 132, 2, 0); 
-  __visc__bindIn(var_40, 133, 3, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-  __visc__bindIn(var_41, 134, 2, 0); 
-  __visc__bindIn(var_41, 135, 3, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_42, 0, 0, 0); 
-  __visc__bindOut(var_42, 1, 1, 0); 
-
-}
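-
-// Wiring convention used throughout root() (assumed semantics, for orientation):
-//   __visc__createNodeND(0, fn)            creates a single (0-dimensional) node
-//                                          executing fn;
-//   __visc__edge(src, dst, 1, sp, dp, 0)   a one-to-one edge from src output
-//                                          port sp to dst input port dp;
-//   __visc__bindIn(node, ri, ni, 0)        forwards root argument index ri to
-//                                          node input index ni;
-//   __visc__bindOut(node, op, rp, 0)       exposes node output port op as root
-//                                          output port rp.
-// For example, a minimal conv -> relu pipeline would be wired as:
-//   void* c = __visc__createNodeND(0, conv_node);
-//   __visc__bindIn(c, 0, 0, 0);      // root arg 0 (input tensor) -> conv input 0
-//   __visc__bindIn(c, 1, 1, 0);      // root arg 1 (byte count)   -> conv input 1
-//   void* r = __visc__createNodeND(0, relu_node);
-//   __visc__edge(c, r, 1, 0, 0, 0);  // conv output tensor -> relu input tensor
-//   __visc__edge(c, r, 1, 1, 1, 0);  // accompanying byte count travels with it
-//   __visc__bindOut(r, 0, 0, 0);     // relu output becomes the root output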
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* batch_normalization_1_gamma; 
-  size_t batch_normalization_1_gamma_bytes; 
-  void* batch_normalization_1_beta; 
-  size_t batch_normalization_1_beta_bytes; 
-  void* batch_normalization_1_mean; 
-  size_t batch_normalization_1_mean_bytes; 
-  void* batch_normalization_1_variance; 
-  size_t batch_normalization_1_variance_bytes; 
-  void* depthwise_conv2d_1_w; 
-  size_t depthwise_conv2d_1_w_bytes; 
-  void* batch_normalization_2_gamma; 
-  size_t batch_normalization_2_gamma_bytes; 
-  void* batch_normalization_2_beta; 
-  size_t batch_normalization_2_beta_bytes; 
-  void* batch_normalization_2_mean; 
-  size_t batch_normalization_2_mean_bytes; 
-  void* batch_normalization_2_variance; 
-  size_t batch_normalization_2_variance_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* batch_normalization_3_gamma; 
-  size_t batch_normalization_3_gamma_bytes; 
-  void* batch_normalization_3_beta; 
-  size_t batch_normalization_3_beta_bytes; 
-  void* batch_normalization_3_mean; 
-  size_t batch_normalization_3_mean_bytes; 
-  void* batch_normalization_3_variance; 
-  size_t batch_normalization_3_variance_bytes; 
-  void* depthwise_conv2d_2_w; 
-  size_t depthwise_conv2d_2_w_bytes; 
-  void* batch_normalization_4_gamma; 
-  size_t batch_normalization_4_gamma_bytes; 
-  void* batch_normalization_4_beta; 
-  size_t batch_normalization_4_beta_bytes; 
-  void* batch_normalization_4_mean; 
-  size_t batch_normalization_4_mean_bytes; 
-  void* batch_normalization_4_variance; 
-  size_t batch_normalization_4_variance_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* batch_normalization_5_gamma; 
-  size_t batch_normalization_5_gamma_bytes; 
-  void* batch_normalization_5_beta; 
-  size_t batch_normalization_5_beta_bytes; 
-  void* batch_normalization_5_mean; 
-  size_t batch_normalization_5_mean_bytes; 
-  void* batch_normalization_5_variance; 
-  size_t batch_normalization_5_variance_bytes; 
-  void* depthwise_conv2d_3_w; 
-  size_t depthwise_conv2d_3_w_bytes; 
-  void* batch_normalization_6_gamma; 
-  size_t batch_normalization_6_gamma_bytes; 
-  void* batch_normalization_6_beta; 
-  size_t batch_normalization_6_beta_bytes; 
-  void* batch_normalization_6_mean; 
-  size_t batch_normalization_6_mean_bytes; 
-  void* batch_normalization_6_variance; 
-  size_t batch_normalization_6_variance_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* batch_normalization_7_gamma; 
-  size_t batch_normalization_7_gamma_bytes; 
-  void* batch_normalization_7_beta; 
-  size_t batch_normalization_7_beta_bytes; 
-  void* batch_normalization_7_mean; 
-  size_t batch_normalization_7_mean_bytes; 
-  void* batch_normalization_7_variance; 
-  size_t batch_normalization_7_variance_bytes; 
-  void* depthwise_conv2d_4_w; 
-  size_t depthwise_conv2d_4_w_bytes; 
-  void* batch_normalization_8_gamma; 
-  size_t batch_normalization_8_gamma_bytes; 
-  void* batch_normalization_8_beta; 
-  size_t batch_normalization_8_beta_bytes; 
-  void* batch_normalization_8_mean; 
-  size_t batch_normalization_8_mean_bytes; 
-  void* batch_normalization_8_variance; 
-  size_t batch_normalization_8_variance_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* batch_normalization_9_gamma; 
-  size_t batch_normalization_9_gamma_bytes; 
-  void* batch_normalization_9_beta; 
-  size_t batch_normalization_9_beta_bytes; 
-  void* batch_normalization_9_mean; 
-  size_t batch_normalization_9_mean_bytes; 
-  void* batch_normalization_9_variance; 
-  size_t batch_normalization_9_variance_bytes; 
-  void* depthwise_conv2d_5_w; 
-  size_t depthwise_conv2d_5_w_bytes; 
-  void* batch_normalization_10_gamma; 
-  size_t batch_normalization_10_gamma_bytes; 
-  void* batch_normalization_10_beta; 
-  size_t batch_normalization_10_beta_bytes; 
-  void* batch_normalization_10_mean; 
-  size_t batch_normalization_10_mean_bytes; 
-  void* batch_normalization_10_variance; 
-  size_t batch_normalization_10_variance_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* batch_normalization_11_gamma; 
-  size_t batch_normalization_11_gamma_bytes; 
-  void* batch_normalization_11_beta; 
-  size_t batch_normalization_11_beta_bytes; 
-  void* batch_normalization_11_mean; 
-  size_t batch_normalization_11_mean_bytes; 
-  void* batch_normalization_11_variance; 
-  size_t batch_normalization_11_variance_bytes; 
-  void* depthwise_conv2d_6_w; 
-  size_t depthwise_conv2d_6_w_bytes; 
-  void* batch_normalization_12_gamma; 
-  size_t batch_normalization_12_gamma_bytes; 
-  void* batch_normalization_12_beta; 
-  size_t batch_normalization_12_beta_bytes; 
-  void* batch_normalization_12_mean; 
-  size_t batch_normalization_12_mean_bytes; 
-  void* batch_normalization_12_variance; 
-  size_t batch_normalization_12_variance_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* batch_normalization_13_gamma; 
-  size_t batch_normalization_13_gamma_bytes; 
-  void* batch_normalization_13_beta; 
-  size_t batch_normalization_13_beta_bytes; 
-  void* batch_normalization_13_mean; 
-  size_t batch_normalization_13_mean_bytes; 
-  void* batch_normalization_13_variance; 
-  size_t batch_normalization_13_variance_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-} RootIn;
-
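-// Host-side harness below (flow as generated; comments are editorial):
-//   1. load weights and inputs with readTrainedWeights (raw binary blobs with
-//      NCHW dimensions);
-//   2. __visc__init() brings up the runtime, and a packed RootIn struct carries
-//      every root argument plus a ret_t slot for the result;
-//   3. __visc__launch(0, root, args) starts the dataflow graph (the leading 0
-//      apparently selecting non-streaming mode); __visc__wait(dfg) blocks until
-//      it finishes;
-//   4. hpvm_request_tensor(result, 0) pulls the output tensor back to the host
-//      before __visc__cleanup() and the accuracy check.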
-int main(){ 
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
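-// Note: readTrainedWeights(path, 0, N, C, H, W) is assumed here to read a raw
-// float blob with the given NCHW dimensions (the second argument selecting the
-// element type); readLabels reads one-byte class labels for the
-// 10000 x 3 x 32 x 32 input batch loaded above.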
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->batch_normalization_1_gamma = batch_normalization_1_gamma; 
-args->batch_normalization_1_gamma_bytes = 0; 
-args->batch_normalization_1_beta = batch_normalization_1_beta; 
-args->batch_normalization_1_beta_bytes = 0; 
-args->batch_normalization_1_mean = batch_normalization_1_mean; 
-args->batch_normalization_1_mean_bytes = 0; 
-args->batch_normalization_1_variance = batch_normalization_1_variance; 
-args->batch_normalization_1_variance_bytes = 0; 
-args->depthwise_conv2d_1_w = depthwise_conv2d_1_w; 
-args->depthwise_conv2d_1_w_bytes = 0; 
-args->batch_normalization_2_gamma = batch_normalization_2_gamma; 
-args->batch_normalization_2_gamma_bytes = 0; 
-args->batch_normalization_2_beta = batch_normalization_2_beta; 
-args->batch_normalization_2_beta_bytes = 0; 
-args->batch_normalization_2_mean = batch_normalization_2_mean; 
-args->batch_normalization_2_mean_bytes = 0; 
-args->batch_normalization_2_variance = batch_normalization_2_variance; 
-args->batch_normalization_2_variance_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->batch_normalization_3_gamma = batch_normalization_3_gamma; 
-args->batch_normalization_3_gamma_bytes = 0; 
-args->batch_normalization_3_beta = batch_normalization_3_beta; 
-args->batch_normalization_3_beta_bytes = 0; 
-args->batch_normalization_3_mean = batch_normalization_3_mean; 
-args->batch_normalization_3_mean_bytes = 0; 
-args->batch_normalization_3_variance = batch_normalization_3_variance; 
-args->batch_normalization_3_variance_bytes = 0; 
-args->depthwise_conv2d_2_w = depthwise_conv2d_2_w; 
-args->depthwise_conv2d_2_w_bytes = 0; 
-args->batch_normalization_4_gamma = batch_normalization_4_gamma; 
-args->batch_normalization_4_gamma_bytes = 0; 
-args->batch_normalization_4_beta = batch_normalization_4_beta; 
-args->batch_normalization_4_beta_bytes = 0; 
-args->batch_normalization_4_mean = batch_normalization_4_mean; 
-args->batch_normalization_4_mean_bytes = 0; 
-args->batch_normalization_4_variance = batch_normalization_4_variance; 
-args->batch_normalization_4_variance_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->batch_normalization_5_gamma = batch_normalization_5_gamma; 
-args->batch_normalization_5_gamma_bytes = 0; 
-args->batch_normalization_5_beta = batch_normalization_5_beta; 
-args->batch_normalization_5_beta_bytes = 0; 
-args->batch_normalization_5_mean = batch_normalization_5_mean; 
-args->batch_normalization_5_mean_bytes = 0; 
-args->batch_normalization_5_variance = batch_normalization_5_variance; 
-args->batch_normalization_5_variance_bytes = 0; 
-args->depthwise_conv2d_3_w = depthwise_conv2d_3_w; 
-args->depthwise_conv2d_3_w_bytes = 0; 
-args->batch_normalization_6_gamma = batch_normalization_6_gamma; 
-args->batch_normalization_6_gamma_bytes = 0; 
-args->batch_normalization_6_beta = batch_normalization_6_beta; 
-args->batch_normalization_6_beta_bytes = 0; 
-args->batch_normalization_6_mean = batch_normalization_6_mean; 
-args->batch_normalization_6_mean_bytes = 0; 
-args->batch_normalization_6_variance = batch_normalization_6_variance; 
-args->batch_normalization_6_variance_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->batch_normalization_7_gamma = batch_normalization_7_gamma; 
-args->batch_normalization_7_gamma_bytes = 0; 
-args->batch_normalization_7_beta = batch_normalization_7_beta; 
-args->batch_normalization_7_beta_bytes = 0; 
-args->batch_normalization_7_mean = batch_normalization_7_mean; 
-args->batch_normalization_7_mean_bytes = 0; 
-args->batch_normalization_7_variance = batch_normalization_7_variance; 
-args->batch_normalization_7_variance_bytes = 0; 
-args->depthwise_conv2d_4_w = depthwise_conv2d_4_w; 
-args->depthwise_conv2d_4_w_bytes = 0; 
-args->batch_normalization_8_gamma = batch_normalization_8_gamma; 
-args->batch_normalization_8_gamma_bytes = 0; 
-args->batch_normalization_8_beta = batch_normalization_8_beta; 
-args->batch_normalization_8_beta_bytes = 0; 
-args->batch_normalization_8_mean = batch_normalization_8_mean; 
-args->batch_normalization_8_mean_bytes = 0; 
-args->batch_normalization_8_variance = batch_normalization_8_variance; 
-args->batch_normalization_8_variance_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->batch_normalization_9_gamma = batch_normalization_9_gamma; 
-args->batch_normalization_9_gamma_bytes = 0; 
-args->batch_normalization_9_beta = batch_normalization_9_beta; 
-args->batch_normalization_9_beta_bytes = 0; 
-args->batch_normalization_9_mean = batch_normalization_9_mean; 
-args->batch_normalization_9_mean_bytes = 0; 
-args->batch_normalization_9_variance = batch_normalization_9_variance; 
-args->batch_normalization_9_variance_bytes = 0; 
-args->depthwise_conv2d_5_w = depthwise_conv2d_5_w; 
-args->depthwise_conv2d_5_w_bytes = 0; 
-args->batch_normalization_10_gamma = batch_normalization_10_gamma; 
-args->batch_normalization_10_gamma_bytes = 0; 
-args->batch_normalization_10_beta = batch_normalization_10_beta; 
-args->batch_normalization_10_beta_bytes = 0; 
-args->batch_normalization_10_mean = batch_normalization_10_mean; 
-args->batch_normalization_10_mean_bytes = 0; 
-args->batch_normalization_10_variance = batch_normalization_10_variance; 
-args->batch_normalization_10_variance_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->batch_normalization_11_gamma = batch_normalization_11_gamma; 
-args->batch_normalization_11_gamma_bytes = 0; 
-args->batch_normalization_11_beta = batch_normalization_11_beta; 
-args->batch_normalization_11_beta_bytes = 0; 
-args->batch_normalization_11_mean = batch_normalization_11_mean; 
-args->batch_normalization_11_mean_bytes = 0; 
-args->batch_normalization_11_variance = batch_normalization_11_variance; 
-args->batch_normalization_11_variance_bytes = 0; 
-args->depthwise_conv2d_6_w = depthwise_conv2d_6_w; 
-args->depthwise_conv2d_6_w_bytes = 0; 
-args->batch_normalization_12_gamma = batch_normalization_12_gamma; 
-args->batch_normalization_12_gamma_bytes = 0; 
-args->batch_normalization_12_beta = batch_normalization_12_beta; 
-args->batch_normalization_12_beta_bytes = 0; 
-args->batch_normalization_12_mean = batch_normalization_12_mean; 
-args->batch_normalization_12_mean_bytes = 0; 
-args->batch_normalization_12_variance = batch_normalization_12_variance; 
-args->batch_normalization_12_variance_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->batch_normalization_13_gamma = batch_normalization_13_gamma; 
-args->batch_normalization_13_gamma_bytes = 0; 
-args->batch_normalization_13_beta = batch_normalization_13_beta; 
-args->batch_normalization_13_beta_bytes = 0; 
-args->batch_normalization_13_mean = batch_normalization_13_mean; 
-args->batch_normalization_13_mean_bytes = 0; 
-args->batch_normalization_13_variance = batch_normalization_13_variance; 
-args->batch_normalization_13_variance_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
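-// Reading the result back through args->input is how this generated harness
-// appears to surface the graph output after __visc__wait returns; treat the
-// slot reuse as an emitter convention rather than documented API.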
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
-computeAccuracy2(labels, 10000, result);
-return 0; 
-
-} 
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_beta.bin
deleted file mode 100644
index 5d9a0d95865637cfb783fb9a56d3ff2ecb57e868..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_gamma.bin
deleted file mode 100644
index 71147ba51b53f9b5f8ed84d3e12b3f60d04e88f0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_mean.bin
deleted file mode 100644
index f75ef27a6bde8cf45607b0e7957603ad5c767928..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_variance.bin
deleted file mode 100644
index cdbb02d6dcc67a983c949224e5ef2356cbed70ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_10_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_beta.bin
deleted file mode 100644
index a6d770acd50df688be127899d5ebc76a6b660108..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_gamma.bin
deleted file mode 100644
index 7d2add83b878940a6e83ff33ac8328b08218b036..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_mean.bin
deleted file mode 100644
index 481fa2d212a171377d79b38765b42481939abd0f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_variance.bin
deleted file mode 100644
index 99b00e0a82730dbf49cc6112379b6106b3538f24..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_beta.bin
deleted file mode 100644
index e2fa099a1b5df7840c7b5b2c8b9ec83bad07f238..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_gamma.bin
deleted file mode 100644
index 2c6d46a8c35a83ea5929e7b0b06980baf1ea8b08..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_mean.bin
deleted file mode 100644
index 4c46529e2774bb4fed9337394213ddfd6fa3b7a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_variance.bin
deleted file mode 100644
index 8afde358ed8dffed9eca531e3ced41953036c926..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_beta.bin
deleted file mode 100644
index 5192e8414e7349eb49f139c31d688349dfcaa915..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_gamma.bin
deleted file mode 100644
index c7ba0b707e96c024bbdcf825a28f78522685b7e2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_mean.bin
deleted file mode 100644
index 41c23352862bc90c6cb298fbda821712c919673b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_variance.bin
deleted file mode 100644
index 67aa92699f5da3e6384e2502fce4cf985d207e2c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_beta.bin
deleted file mode 100644
index 05d61c8e00f196b83dde7de794cc9feff2929582..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_gamma.bin
deleted file mode 100644
index 1aaaeaa110d8b9eb8108a1546302f8d5c1c12c35..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_mean.bin
deleted file mode 100644
index ffcd3adfeac9b601872fa59caa42601fdc10494c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_variance.bin
deleted file mode 100644
index f29dc5a9db7e4fe9783917749bd151ce80e40702..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_1_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_beta.bin
deleted file mode 100644
index ba12532332cec1d6ee20d16d04be81575a8f0802..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_gamma.bin
deleted file mode 100644
index bf0dd075d19280dfcda711fd95eeab6fb429b8f5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_mean.bin
deleted file mode 100644
index faec424f63fab99e4ba00101a005c1b84cb2f8f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_variance.bin
deleted file mode 100644
index 80125312bc29cc27bcb9a51db2d206c70b19c25d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_beta.bin
deleted file mode 100644
index 0a7e5127f93e4f3e77893c02209dd34f92fcde00..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_gamma.bin
deleted file mode 100644
index ab4be7e7af315799ddc2f371e09442d81c81ec9e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_mean.bin
deleted file mode 100644
index 5c4cccbc2d7756430aba85f100d164425b7b7559..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_variance.bin
deleted file mode 100644
index 88e0320d9764ac47a0ffeccd912430db4e3a70ad..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_3_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_beta.bin
deleted file mode 100644
index 78b0f312269445116d4b9e05d3f2f85730509d46..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_gamma.bin
deleted file mode 100644
index cc9ac2a0fcc9dc57b61c54d13f9cdaba8bf045c9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_mean.bin
deleted file mode 100644
index e184ea4954ffe0e8070fd467bc90093c142ee754..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_variance.bin
deleted file mode 100644
index dd6c0672454934523c04c2e124bb64d024c2207f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_beta.bin
deleted file mode 100644
index d111c363bdab8b36db98fcefcd2eb61e080eadd4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_gamma.bin
deleted file mode 100644
index aae71935a9ec2124e203c921e2d2ca570f3aa2a8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_mean.bin
deleted file mode 100644
index b4675bad00eddb39999c5411eb225f9d13a22fc4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_variance.bin
deleted file mode 100644
index f8126c266f398a9013241ee5d97fe42beaa5bb37..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_beta.bin
deleted file mode 100644
index c18a950b0d0acca31f82e84135c392e348868011..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_gamma.bin
deleted file mode 100644
index 92bc587a86c98aadc5549f3da65b4f74e812b2fb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_mean.bin
deleted file mode 100644
index c888f2c909ac6d95871fe944b6b4f51242d4eb8a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_variance.bin
deleted file mode 100644
index a5a799857b7cc50a9aa8208aab08e7270dccca5b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_beta.bin
deleted file mode 100644
index ab02be5f352315724b5ca3b59e33ff085f46207d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_gamma.bin
deleted file mode 100644
index 72c58ae29db08ac94c3b9b778ea015405cb9d3f6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_mean.bin
deleted file mode 100644
index 7f0e01a07c23faa2101cbf299ea9d35fe3d5e3ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_variance.bin
deleted file mode 100644
index 094474aca2ad49d1400c71d9acbfcd1631c7be18..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_beta.bin
deleted file mode 100644
index 5f92c58a7c47c207a98a77f6961410d08e8446f0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_gamma.bin
deleted file mode 100644
index 6ab36ce54740e9e4a4e4948684e0e4fbbd71b1cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_mean.bin
deleted file mode 100644
index 5d093813a0926dd3c1c67b6f50e092465101fde9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_variance.bin
deleted file mode 100644
index edf6463b0ca999595327a9dc300242a9e58c1fb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_beta.bin
deleted file mode 100644
index ad3f1dc8965ba641749d65a5d0c5b32ab40c5dd4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_gamma.bin
deleted file mode 100644
index ec2b90646b3c7f21565e4972638e746e71a2b5bb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_mean.bin
deleted file mode 100644
index 47b2393cf22e01162577be3e361a1a40caec6bb8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_variance.bin
deleted file mode 100644
index fb0c96059789a653f0d064e2c4743287b213d90d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_1_w.bin
deleted file mode 100644
index 3e10934df8c5194e89ced8a8c6dfc0c496d63659..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_2_w.bin
deleted file mode 100644
index b156a80dbbad1956afde6c953b760fe3147f86dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_3_w.bin
deleted file mode 100644
index 39ccf4d05b623c02ad5c86aa537804df697b2eca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_4_w.bin
deleted file mode 100644
index 19fa2c8035b9439be46392feee277b1e2c796994..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_5_w.bin
deleted file mode 100644
index 79d3b1efe6c1d18ce86fea69602f161425c76421..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_6_w.bin
deleted file mode 100644
index fc7d758888153e7a52ebb59e8db7822d5ca58283..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_7_w.bin
deleted file mode 100644
index d569ea19a45477b991af7bce4aa14289bb3858a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_b.bin
deleted file mode 100644
index dde75645d79ba2039e975a4cb2892f2cdca58038..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_w.bin
deleted file mode 100644
index e053b5d9d9ca19466225106fd9ad109d55e32cdb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_1_w.bin
deleted file mode 100644
index b0948ad7c455ab26b7a500823da78dd2ebdf5a2f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_2_w.bin
deleted file mode 100644
index 673879938fec8d6cea506ceba413479fe5305a72..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_3_w.bin
deleted file mode 100644
index 19e9c200ad108dcafbdac74c614b3fe637a76e0b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_4_w.bin
deleted file mode 100644
index 036b5573250744da275f27bca679c5eea90f8d67..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_5_w.bin
deleted file mode 100644
index 870049e69e3783cf45939876c6b8717033d6cce7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_6_w.bin
deleted file mode 100644
index f23ffe4c99eaac8f9f6d96d48f7312e25347f86f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/input.bin
deleted file mode 100644
index 7a6fbc28f5a947a90863278a5249303f9f52741b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layer_composition.txt
deleted file mode 100644
index 9b8b3f7e11a428a28fecbde2c204bf39b7e02703..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layer_composition.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-depthwise_conv  
-batchnorm  
-activation  
-conv  
-batchnorm  
-activation  
-pool  
-dense  add  
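Note: the deleted layer_composition.txt above records the op schedule of this shallow MobileNet: a full-convolution stem followed by six depthwise-separable blocks (each a depthwise_conv plus a 1x1 conv, with every conv followed by batchnorm and activation), then pooling and the dense classifier with its trailing add. A minimal standalone C++ sketch (not part of the deleted tree) that regenerates that 41-op sequence:

#include <cstdio>

int main() {
  // Print one op followed by its batchnorm/activation pair, as in the file above.
  auto block = [](const char* op) {
    std::printf("%s\nbatchnorm\nactivation\n", op);
  };
  block("conv");                  // stem: full 3x3 convolution
  for (int i = 0; i < 6; i++) {   // six depthwise-separable blocks
    block("depthwise_conv");      // per-channel 3x3 convolution
    block("conv");                // 1x1 pointwise convolution
  }
  std::printf("pool\ndense  add\n");  // final pooling and classifier
  return 0;
}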
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layers.txt
deleted file mode 100644
index a9415755180a7ebdceb89b7e3e6d6cee258b18c4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/layers.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorBatchNorm1
-#tensorRelu1
-#tensorDepthwiseConv1
-#tensorBatchNorm2
-#tensorRelu2
-Conv2,10000,32,32,32,64,32,1,1
-#tensorBatchNorm3
-#tensorRelu3
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu4
-Conv3,10000,64,16,16,128,64,1,1
-#tensorBatchNorm5
-#tensorRelu5
-#tensorDepthwiseConv3
-#tensorBatchNorm6
-#tensorRelu6
-Conv4,10000,128,16,16,128,128,1,1
-#tensorBatchNorm7
-#tensorRelu7
-#tensorDepthwiseConv4
-#tensorBatchNorm8
-#tensorRelu8
-Conv5,10000,128,8,8,256,128,1,1
-#tensorBatchNorm9
-#tensorRelu9
-#tensorDepthwiseConv5
-#tensorBatchNorm10
-#tensorRelu10
-Conv6,10000,256,8,8,256,256,1,1
-#tensorBatchNorm11
-#tensorRelu11
-#tensorDepthwiseConv6
-#tensorBatchNorm12
-#tensorRelu12
-Conv7,10000,256,4,4,512,256,1,1
-#tensorBatchNorm13
-#tensorRelu13
-#tensorPooling1
-FC1,10000,2048,2048,10
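Judging from the matching readTrainedWeights calls in the deleted sources below (e.g. Conv1 pairs with readTrainedWeights(..., 0,32,3,3,3)), each Conv line in the deleted layers.txt appears to encode: name, test-input count, input channels, input height, input width, output channels, filter channels, kernel height, kernel width; the FC1 line appears to be name, input count, input dimension (twice), output dimension. A minimal parsing sketch under that assumed interpretation (the struct and its field names are hypothetical):

#include <sstream>
#include <string>
#include <vector>

struct ConvDesc {
  std::string name;
  int inputs, in_ch, in_h, in_w, out_ch, filt_ch, k_h, k_w;
};

// Split a "Conv1,10000,3,32,32,32,3,3,3"-style line on commas.
ConvDesc parseConvLine(const std::string& line) {
  std::stringstream ss(line);
  std::string tok;
  std::vector<std::string> f;
  while (std::getline(ss, tok, ','))
    f.push_back(tok);
  return {f[0],            std::stoi(f[1]), std::stoi(f[2]),
          std::stoi(f[3]), std::stoi(f[4]), std::stoi(f[5]),
          std::stoi(f[6]), std::stoi(f[7]), std::stoi(f[8])};
}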
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/promise_src.cc
deleted file mode 100644
index c5fd3606da51281bc2c583e98f024bd2f54f837b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/promise_src.cc
+++ /dev/null
@@ -1,238 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.9892114, 2.126797, conv2d_1_w, -1.5164621164798737, 1.6472081774473288, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -9.868980642318725, 10.560956018447879, 9); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_3 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_4 = tensorBatchNorm(var_3, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_5 = tensorRelu(var_4); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 6.821381127357554, conv2d_2_w, -1.1834390873908995, 1.2731596627235617, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.875998497009277, 7.51305247974393, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.826067455768602, conv2d_3_w, -0.599876856982708, 0.6812073457241064, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.633289833068848, 5.177892235755925, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorConvolution(var_14, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_16 = tensorBatchNorm(var_15, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 4.02646304416659, conv2d_4_w, -0.4555967862010002, 0.4942613914608956, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -5.316803941726685, 4.605850250244146, 9); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_24 = ConvLayer_PROMISE(var_23, 0.0, 4.532649063110355, conv2d_5_w, -0.35657615590095515, 0.3382165088057521, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.1012511816024775, 4.3630500688553, 9); 
-void* var_25 = tensorBatchNorm(var_24, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorConvolution(var_26, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_28 = tensorBatchNorm(var_27, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_29 = tensorRelu(var_28); 
-void* var_30 = ConvLayer_PROMISE(var_29, 0.0, 3.9874704387188977, conv2d_6_w, -0.28502783328294756, 0.28604640334844594, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.243851703643799, 3.486250406742097, 9); 
-void* var_31 = tensorBatchNorm(var_30, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_32 = tensorRelu(var_31); 
-void* var_33 = tensorConvolution(var_32, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 6.563065901756522, conv2d_7_w, -0.18946402323246003, 0.19012390717864017, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.938115713119507, 3.538363476753238, 9); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,1,2,2,0,0,2,2); 
-void* var_40 = FCLayer_PROMISE(var_39, 0.0, 1.8908388000727185, dense_1_w, -0.35140394401550296, 0.422872786462307, dense_1_b, -0.23878151, 0.26507422, -1, -14.630816223144532, 27.27252123260504, 9); 
-void* var_41 = tensorSoftmax(var_40); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_41); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
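The deleted promise_src.cc above is a generated harness with a fixed shape: initialize the runtime, repeat the whole evaluation total_runs times, and within each run load the weights, stream input batches through the layer sequence, and accumulate accuracy; the paired min/max constants passed to ConvLayer_PROMISE and FCLayer_PROMISE appear to be per-tensor value ranges used by the approximate lowering. A condensed sketch of just that skeleton, using only calls that appear in the deleted file (the 41-op layer pipeline itself is elided and stubbed):

#include <stdio.h>
#include <stdint.h>
#include <string>
#include "../../../tensor_runtime/include/tensor_runtime.h"
#include "../../include/utils.h"

int main() {
  llvm_hpvm_initTensorRt(0);
  int total_runs = 100;
  for (int run = 0; run < total_runs; run++) {
    startMemTracking();
    int test_input_size = 10000;
    int batch_size = 10000;   // a single batch spans the whole test set
    int batch_count = test_input_size / batch_size;
    float final_accuracy = 0.0;
    for (int i = 0; i < batch_count; i++) {
      int start = i * batch_size;
      int end = (i + 1) * batch_size;
      std::string dir_prefix = "data/mobilenet_shallow_nathan/";
      void* input = readInputBatch((dir_prefix + "input.bin").c_str(),
                                   0, start, end, 3, 32, 32);
      void* output = input;   // stub: the layer sequence would run here
      uint8_t* labels = readLabelsBatch((dir_prefix + "labels.bin").c_str(),
                                        start, end);
      final_accuracy += computeAccuracy2(labels, batch_size, output);
      freeBatchMemory();      // release per-batch tensors before the next pass
    }
    dumpFinalAccuracy(final_accuracy / batch_count);
  }
  dumpExecutionAccuracies();
  llvm_hpvm_cleanupTensorRt();
  return 0;
}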
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/src.cc
deleted file mode 100644
index 6599f7d0ea0be6a76c4154d25b3a7be2c6724115..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenet_shallow2/src.cc
+++ /dev/null
@@ -1,231 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("data/mobilenet_shallow_nathan/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,32,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,64,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,64,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,1,1); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,128,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,128,1,3,3); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,128,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,256,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,256,1,3,3); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,256,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,512,256,1,1); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,512,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,2048,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
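
The block above repeats one load pattern per tensor: concatenate dir_prefix with the weight file name, then call readTrainedWeights with a leading 0 and the tensor's four dimension sizes (N, C, H, W for the convolution filters). A minimal C++ wrapper capturing that pattern is sketched below; the helper name loadWeight is hypothetical, and the readTrainedWeights prototype is inferred from the call sites above rather than taken from a header.

#include <string>

// Prototype inferred from the call sites above (hypothetical; the real
// declaration lives in the tensor-runtime headers).
extern void* readTrainedWeights(const char* path, int flag,
                                int n, int c, int h, int w);

// Hypothetical helper: one call per weight tensor instead of the repeated
// path-building boilerplate above.
void* loadWeight(const std::string& dir_prefix, const char* name,
                 int n, int c, int h, int w) {
  std::string path = dir_prefix + std::string(name);
  return readTrainedWeights(path.c_str(), 0, n, c, h, w);
}

// Example mirroring the dense_1_w load above:
//   void* dense_1_w = loadWeight(dir_prefix, "dense_1_w.bin", 1, 1, 2048, 10);
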
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-void* var_1 = tensorBatchNorm(var_0, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-void* var_8 = tensorBatchNorm(var_7, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_9 = tensorRelu(var_8); 
-void* var_11 = tensorConvolution(var_9, depthwise_conv2d_2_w, 1, 1, 2, 2, 1, 64); 
-void* var_12 = tensorBatchNorm(var_11, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_13 = tensorRelu(var_12); 
-void* var_14 = tensorConvolution(var_13, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-void* var_15 = tensorBatchNorm(var_14, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_16 = tensorRelu(var_15); 
-void* var_18 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 128); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_21 = tensorConvolution(var_20, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-void* var_22 = tensorBatchNorm(var_21, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_23 = tensorRelu(var_22); 
-void* var_26 = tensorConvolution(var_23, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 128); 
-void* var_27 = tensorBatchNorm(var_26, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_28 = tensorRelu(var_27); 
-void* var_29 = tensorConvolution(var_28, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-void* var_30 = tensorBatchNorm(var_29, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 256); 
-void* var_34 = tensorBatchNorm(var_33, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_35 = tensorRelu(var_34); 
-void* var_36 = tensorConvolution(var_35, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-void* var_37 = tensorBatchNorm(var_36, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_38 = tensorRelu(var_37); 
-void* var_41 = tensorConvolution(var_38, depthwise_conv2d_6_w, 1, 1, 2, 2, 1, 256); 
-void* var_42 = tensorBatchNorm(var_41, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_43 = tensorRelu(var_42); 
-void* var_44 = tensorConvolution(var_43, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-void* var_45 = tensorBatchNorm(var_44, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_46 = tensorRelu(var_45); 
-void* var_47 = tensorPooling(var_46,1,2,2,0,0,2,2); 
-void* var_49 = tensorGemmGPU(var_47, dense_1_w); 
-void* var_50 = tensorAdd(var_49, dense_1_b); 
-void* var_51 = tensorSoftmax(var_50); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_51); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
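
The forward pass in the loop above is the depthwise-separable MobileNet pattern: every block is a convolution (pointwise 1x1 or grouped/depthwise 3x3), then batch normalization, then ReLU. A sketch of that repeated unit, using the same runtime entry points the deleted file calls, follows; the conv_bn_relu wrapper and the BNParams struct are illustrative only, and the extern prototypes are inferred from the call sites above.

// Runtime entry points as called in the deleted source (prototypes inferred
// from the call sites; the real declarations come from the runtime headers).
extern void* tensorConvolution(void* input, void* filter,
                               int pad_h, int pad_w, int stride_h, int stride_w,
                               int mode, int groups);
extern void* tensorBatchNorm(void* input, void* gamma, void* beta,
                             void* mean, void* variance, double epsilon);
extern void* tensorRelu(void* input);

// Hypothetical grouping of the four batch-norm tensors loaded per layer.
struct BNParams { void *gamma, *beta, *mean, *variance; };

// One MobileNet unit: conv -> batchnorm -> relu. groups == 1 gives the
// pointwise 1x1 convolutions above; groups == the channel count gives the
// depthwise 3x3 convolutions (e.g. groups = 32 for depthwise_conv2d_1_w).
void* conv_bn_relu(void* input, void* filter, BNParams bn,
                   int pad, int stride, int groups) {
  void* conv = tensorConvolution(input, filter, pad, pad, stride, stride,
                                 1, groups);
  void* norm = tensorBatchNorm(conv, bn.gamma, bn.beta, bn.mean,
                               bn.variance, 0.001);
  return tensorRelu(norm);
}
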
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/approxhpvm_src.cc
deleted file mode 100644
index d880493c5417e14cd15e5acfadc03b7a7ecd170a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/approxhpvm_src.cc
+++ /dev/null
@@ -1,4367 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 32); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 96); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 144); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 144); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 192); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_43_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 192); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_44_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_45_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_46_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_47_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_48_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_49_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_50_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_51_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_52_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 192); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_53_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_54_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_55_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_56_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_57_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_58_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_59_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_60_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 384); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_61_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_62_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_63_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_64_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_65_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_66_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_67_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_68_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_69_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 384); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_70_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_71_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_72_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_73_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_74_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_75_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_76_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_77_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_78_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 384); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_79_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_80_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_81_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_82_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_83_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_84_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_85_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_86_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_87_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 384); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_88_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_89_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_90_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_91_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_92_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_93_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_94_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_95_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 576); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_96_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_97_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_98_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_99_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_100_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_101_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_102_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_103_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_104_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 576); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_105_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_106_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_107_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_108_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_109_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_110_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_111_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_112_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_113_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 2, 2, 1, 576); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_114_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_115_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_116_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_117_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_118_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_119_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_120_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_121_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 960); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_122_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_123_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_124_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_125_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_126_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_127_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_128_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_129_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_130_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 960); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_131_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_132_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_133_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_134_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_135_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_136_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_137_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_138_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_139_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_group_convolution(t1, t2, 1, 1, 1, 1, 1, 960); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_140_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_141_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_142_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_143_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_144_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 0, 0, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_145_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2, void* t3, size_t bytes_t3, void* t4, size_t bytes_t4, void* t5, size_t bytes_t5) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(5, t1, t2, t3, t4, t5, 0); 
-
-  void *r = __visc__tensor_batchnorm(t1, t2, t3, t4, t5, 0.001); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_146_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_147_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_avg(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_148_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_149_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_150_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
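
Every leaf node above follows one fixed shape: a cuDNN target hint, an attribute list naming the tensor inputs, a single __visc__ tensor intrinsic, and a two-element return of the result tensor plus a zero byte count. A minimal sketch of that shape in the file's own style (example_node is a made-up name; the intrinsics and the visc.h header are the ones already used throughout this file):

// Illustrative leaf node with the same structure as var_0_node..var_150_node.
void example_node(void* t1, size_t bytes_t1) {
  __visc__hint(visc::CUDNN_TARGET);   // map this node onto the cuDNN backend
  __visc__attributes(1, t1, 0);       // 1 input tensor, 0 output attributes

  void* r = __visc__tensor_relu(t1);  // any single tensor intrinsic goes here
  __visc__return(2, r, (size_t) 0);   // return pair: (result tensor, size 0)
}

The root function that follows stitches these nodes into the HPVM dataflow graph, passing each weight tensor together with its byte size.
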
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* depthwise_conv2d_1_w, size_t depthwise_conv2d_1_w_bytes, 
-	  void* batch_normalization_1_gamma, size_t batch_normalization_1_gamma_bytes, 
-	  void* batch_normalization_1_beta, size_t batch_normalization_1_beta_bytes, 
-	  void* batch_normalization_1_mean, size_t batch_normalization_1_mean_bytes, 
-	  void* batch_normalization_1_variance, size_t batch_normalization_1_variance_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* batch_normalization_2_gamma, size_t batch_normalization_2_gamma_bytes, 
-	  void* batch_normalization_2_beta, size_t batch_normalization_2_beta_bytes, 
-	  void* batch_normalization_2_mean, size_t batch_normalization_2_mean_bytes, 
-	  void* batch_normalization_2_variance, size_t batch_normalization_2_variance_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* batch_normalization_3_gamma, size_t batch_normalization_3_gamma_bytes, 
-	  void* batch_normalization_3_beta, size_t batch_normalization_3_beta_bytes, 
-	  void* batch_normalization_3_mean, size_t batch_normalization_3_mean_bytes, 
-	  void* batch_normalization_3_variance, size_t batch_normalization_3_variance_bytes, 
-	  void* depthwise_conv2d_2_w, size_t depthwise_conv2d_2_w_bytes, 
-	  void* batch_normalization_4_gamma, size_t batch_normalization_4_gamma_bytes, 
-	  void* batch_normalization_4_beta, size_t batch_normalization_4_beta_bytes, 
-	  void* batch_normalization_4_mean, size_t batch_normalization_4_mean_bytes, 
-	  void* batch_normalization_4_variance, size_t batch_normalization_4_variance_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* batch_normalization_5_gamma, size_t batch_normalization_5_gamma_bytes, 
-	  void* batch_normalization_5_beta, size_t batch_normalization_5_beta_bytes, 
-	  void* batch_normalization_5_mean, size_t batch_normalization_5_mean_bytes, 
-	  void* batch_normalization_5_variance, size_t batch_normalization_5_variance_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* batch_normalization_6_gamma, size_t batch_normalization_6_gamma_bytes, 
-	  void* batch_normalization_6_beta, size_t batch_normalization_6_beta_bytes, 
-	  void* batch_normalization_6_mean, size_t batch_normalization_6_mean_bytes, 
-	  void* batch_normalization_6_variance, size_t batch_normalization_6_variance_bytes, 
-	  void* depthwise_conv2d_3_w, size_t depthwise_conv2d_3_w_bytes, 
-	  void* batch_normalization_7_gamma, size_t batch_normalization_7_gamma_bytes, 
-	  void* batch_normalization_7_beta, size_t batch_normalization_7_beta_bytes, 
-	  void* batch_normalization_7_mean, size_t batch_normalization_7_mean_bytes, 
-	  void* batch_normalization_7_variance, size_t batch_normalization_7_variance_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* batch_normalization_8_gamma, size_t batch_normalization_8_gamma_bytes, 
-	  void* batch_normalization_8_beta, size_t batch_normalization_8_beta_bytes, 
-	  void* batch_normalization_8_mean, size_t batch_normalization_8_mean_bytes, 
-	  void* batch_normalization_8_variance, size_t batch_normalization_8_variance_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* batch_normalization_9_gamma, size_t batch_normalization_9_gamma_bytes, 
-	  void* batch_normalization_9_beta, size_t batch_normalization_9_beta_bytes, 
-	  void* batch_normalization_9_mean, size_t batch_normalization_9_mean_bytes, 
-	  void* batch_normalization_9_variance, size_t batch_normalization_9_variance_bytes, 
-	  void* depthwise_conv2d_4_w, size_t depthwise_conv2d_4_w_bytes, 
-	  void* batch_normalization_10_gamma, size_t batch_normalization_10_gamma_bytes, 
-	  void* batch_normalization_10_beta, size_t batch_normalization_10_beta_bytes, 
-	  void* batch_normalization_10_mean, size_t batch_normalization_10_mean_bytes, 
-	  void* batch_normalization_10_variance, size_t batch_normalization_10_variance_bytes, 
-	  void* conv2d_8_w, size_t conv2d_8_w_bytes, 
-	  void* batch_normalization_11_gamma, size_t batch_normalization_11_gamma_bytes, 
-	  void* batch_normalization_11_beta, size_t batch_normalization_11_beta_bytes, 
-	  void* batch_normalization_11_mean, size_t batch_normalization_11_mean_bytes, 
-	  void* batch_normalization_11_variance, size_t batch_normalization_11_variance_bytes, 
-	  void* conv2d_9_w, size_t conv2d_9_w_bytes, 
-	  void* batch_normalization_12_gamma, size_t batch_normalization_12_gamma_bytes, 
-	  void* batch_normalization_12_beta, size_t batch_normalization_12_beta_bytes, 
-	  void* batch_normalization_12_mean, size_t batch_normalization_12_mean_bytes, 
-	  void* batch_normalization_12_variance, size_t batch_normalization_12_variance_bytes, 
-	  void* depthwise_conv2d_5_w, size_t depthwise_conv2d_5_w_bytes, 
-	  void* batch_normalization_13_gamma, size_t batch_normalization_13_gamma_bytes, 
-	  void* batch_normalization_13_beta, size_t batch_normalization_13_beta_bytes, 
-	  void* batch_normalization_13_mean, size_t batch_normalization_13_mean_bytes, 
-	  void* batch_normalization_13_variance, size_t batch_normalization_13_variance_bytes, 
-	  void* conv2d_10_w, size_t conv2d_10_w_bytes, 
-	  void* batch_normalization_14_gamma, size_t batch_normalization_14_gamma_bytes, 
-	  void* batch_normalization_14_beta, size_t batch_normalization_14_beta_bytes, 
-	  void* batch_normalization_14_mean, size_t batch_normalization_14_mean_bytes, 
-	  void* batch_normalization_14_variance, size_t batch_normalization_14_variance_bytes, 
-	  void* conv2d_11_w, size_t conv2d_11_w_bytes, 
-	  void* batch_normalization_15_gamma, size_t batch_normalization_15_gamma_bytes, 
-	  void* batch_normalization_15_beta, size_t batch_normalization_15_beta_bytes, 
-	  void* batch_normalization_15_mean, size_t batch_normalization_15_mean_bytes, 
-	  void* batch_normalization_15_variance, size_t batch_normalization_15_variance_bytes, 
-	  void* depthwise_conv2d_6_w, size_t depthwise_conv2d_6_w_bytes, 
-	  void* batch_normalization_16_gamma, size_t batch_normalization_16_gamma_bytes, 
-	  void* batch_normalization_16_beta, size_t batch_normalization_16_beta_bytes, 
-	  void* batch_normalization_16_mean, size_t batch_normalization_16_mean_bytes, 
-	  void* batch_normalization_16_variance, size_t batch_normalization_16_variance_bytes, 
-	  void* conv2d_12_w, size_t conv2d_12_w_bytes, 
-	  void* batch_normalization_17_gamma, size_t batch_normalization_17_gamma_bytes, 
-	  void* batch_normalization_17_beta, size_t batch_normalization_17_beta_bytes, 
-	  void* batch_normalization_17_mean, size_t batch_normalization_17_mean_bytes, 
-	  void* batch_normalization_17_variance, size_t batch_normalization_17_variance_bytes, 
-	  void* conv2d_13_w, size_t conv2d_13_w_bytes, 
-	  void* batch_normalization_18_gamma, size_t batch_normalization_18_gamma_bytes, 
-	  void* batch_normalization_18_beta, size_t batch_normalization_18_beta_bytes, 
-	  void* batch_normalization_18_mean, size_t batch_normalization_18_mean_bytes, 
-	  void* batch_normalization_18_variance, size_t batch_normalization_18_variance_bytes, 
-	  void* depthwise_conv2d_7_w, size_t depthwise_conv2d_7_w_bytes, 
-	  void* batch_normalization_19_gamma, size_t batch_normalization_19_gamma_bytes, 
-	  void* batch_normalization_19_beta, size_t batch_normalization_19_beta_bytes, 
-	  void* batch_normalization_19_mean, size_t batch_normalization_19_mean_bytes, 
-	  void* batch_normalization_19_variance, size_t batch_normalization_19_variance_bytes, 
-	  void* conv2d_14_w, size_t conv2d_14_w_bytes, 
-	  void* batch_normalization_20_gamma, size_t batch_normalization_20_gamma_bytes, 
-	  void* batch_normalization_20_beta, size_t batch_normalization_20_beta_bytes, 
-	  void* batch_normalization_20_mean, size_t batch_normalization_20_mean_bytes, 
-	  void* batch_normalization_20_variance, size_t batch_normalization_20_variance_bytes, 
-	  void* conv2d_15_w, size_t conv2d_15_w_bytes, 
-	  void* batch_normalization_21_gamma, size_t batch_normalization_21_gamma_bytes, 
-	  void* batch_normalization_21_beta, size_t batch_normalization_21_beta_bytes, 
-	  void* batch_normalization_21_mean, size_t batch_normalization_21_mean_bytes, 
-	  void* batch_normalization_21_variance, size_t batch_normalization_21_variance_bytes, 
-	  void* depthwise_conv2d_8_w, size_t depthwise_conv2d_8_w_bytes, 
-	  void* batch_normalization_22_gamma, size_t batch_normalization_22_gamma_bytes, 
-	  void* batch_normalization_22_beta, size_t batch_normalization_22_beta_bytes, 
-	  void* batch_normalization_22_mean, size_t batch_normalization_22_mean_bytes, 
-	  void* batch_normalization_22_variance, size_t batch_normalization_22_variance_bytes, 
-	  void* conv2d_16_w, size_t conv2d_16_w_bytes, 
-	  void* batch_normalization_23_gamma, size_t batch_normalization_23_gamma_bytes, 
-	  void* batch_normalization_23_beta, size_t batch_normalization_23_beta_bytes, 
-	  void* batch_normalization_23_mean, size_t batch_normalization_23_mean_bytes, 
-	  void* batch_normalization_23_variance, size_t batch_normalization_23_variance_bytes, 
-	  void* conv2d_17_w, size_t conv2d_17_w_bytes, 
-	  void* batch_normalization_24_gamma, size_t batch_normalization_24_gamma_bytes, 
-	  void* batch_normalization_24_beta, size_t batch_normalization_24_beta_bytes, 
-	  void* batch_normalization_24_mean, size_t batch_normalization_24_mean_bytes, 
-	  void* batch_normalization_24_variance, size_t batch_normalization_24_variance_bytes, 
-	  void* depthwise_conv2d_9_w, size_t depthwise_conv2d_9_w_bytes, 
-	  void* batch_normalization_25_gamma, size_t batch_normalization_25_gamma_bytes, 
-	  void* batch_normalization_25_beta, size_t batch_normalization_25_beta_bytes, 
-	  void* batch_normalization_25_mean, size_t batch_normalization_25_mean_bytes, 
-	  void* batch_normalization_25_variance, size_t batch_normalization_25_variance_bytes, 
-	  void* conv2d_18_w, size_t conv2d_18_w_bytes, 
-	  void* batch_normalization_26_gamma, size_t batch_normalization_26_gamma_bytes, 
-	  void* batch_normalization_26_beta, size_t batch_normalization_26_beta_bytes, 
-	  void* batch_normalization_26_mean, size_t batch_normalization_26_mean_bytes, 
-	  void* batch_normalization_26_variance, size_t batch_normalization_26_variance_bytes, 
-	  void* conv2d_19_w, size_t conv2d_19_w_bytes, 
-	  void* batch_normalization_27_gamma, size_t batch_normalization_27_gamma_bytes, 
-	  void* batch_normalization_27_beta, size_t batch_normalization_27_beta_bytes, 
-	  void* batch_normalization_27_mean, size_t batch_normalization_27_mean_bytes, 
-	  void* batch_normalization_27_variance, size_t batch_normalization_27_variance_bytes, 
-	  void* depthwise_conv2d_10_w, size_t depthwise_conv2d_10_w_bytes, 
-	  void* batch_normalization_28_gamma, size_t batch_normalization_28_gamma_bytes, 
-	  void* batch_normalization_28_beta, size_t batch_normalization_28_beta_bytes, 
-	  void* batch_normalization_28_mean, size_t batch_normalization_28_mean_bytes, 
-	  void* batch_normalization_28_variance, size_t batch_normalization_28_variance_bytes, 
-	  void* conv2d_20_w, size_t conv2d_20_w_bytes, 
-	  void* batch_normalization_29_gamma, size_t batch_normalization_29_gamma_bytes, 
-	  void* batch_normalization_29_beta, size_t batch_normalization_29_beta_bytes, 
-	  void* batch_normalization_29_mean, size_t batch_normalization_29_mean_bytes, 
-	  void* batch_normalization_29_variance, size_t batch_normalization_29_variance_bytes, 
-	  void* conv2d_21_w, size_t conv2d_21_w_bytes, 
-	  void* batch_normalization_30_gamma, size_t batch_normalization_30_gamma_bytes, 
-	  void* batch_normalization_30_beta, size_t batch_normalization_30_beta_bytes, 
-	  void* batch_normalization_30_mean, size_t batch_normalization_30_mean_bytes, 
-	  void* batch_normalization_30_variance, size_t batch_normalization_30_variance_bytes, 
-	  void* depthwise_conv2d_11_w, size_t depthwise_conv2d_11_w_bytes, 
-	  void* batch_normalization_31_gamma, size_t batch_normalization_31_gamma_bytes, 
-	  void* batch_normalization_31_beta, size_t batch_normalization_31_beta_bytes, 
-	  void* batch_normalization_31_mean, size_t batch_normalization_31_mean_bytes, 
-	  void* batch_normalization_31_variance, size_t batch_normalization_31_variance_bytes, 
-	  void* conv2d_22_w, size_t conv2d_22_w_bytes, 
-	  void* batch_normalization_32_gamma, size_t batch_normalization_32_gamma_bytes, 
-	  void* batch_normalization_32_beta, size_t batch_normalization_32_beta_bytes, 
-	  void* batch_normalization_32_mean, size_t batch_normalization_32_mean_bytes, 
-	  void* batch_normalization_32_variance, size_t batch_normalization_32_variance_bytes, 
-	  void* conv2d_23_w, size_t conv2d_23_w_bytes, 
-	  void* batch_normalization_33_gamma, size_t batch_normalization_33_gamma_bytes, 
-	  void* batch_normalization_33_beta, size_t batch_normalization_33_beta_bytes, 
-	  void* batch_normalization_33_mean, size_t batch_normalization_33_mean_bytes, 
-	  void* batch_normalization_33_variance, size_t batch_normalization_33_variance_bytes, 
-	  void* depthwise_conv2d_12_w, size_t depthwise_conv2d_12_w_bytes, 
-	  void* batch_normalization_34_gamma, size_t batch_normalization_34_gamma_bytes, 
-	  void* batch_normalization_34_beta, size_t batch_normalization_34_beta_bytes, 
-	  void* batch_normalization_34_mean, size_t batch_normalization_34_mean_bytes, 
-	  void* batch_normalization_34_variance, size_t batch_normalization_34_variance_bytes, 
-	  void* conv2d_24_w, size_t conv2d_24_w_bytes, 
-	  void* batch_normalization_35_gamma, size_t batch_normalization_35_gamma_bytes, 
-	  void* batch_normalization_35_beta, size_t batch_normalization_35_beta_bytes, 
-	  void* batch_normalization_35_mean, size_t batch_normalization_35_mean_bytes, 
-	  void* batch_normalization_35_variance, size_t batch_normalization_35_variance_bytes, 
-	  void* conv2d_25_w, size_t conv2d_25_w_bytes, 
-	  void* batch_normalization_36_gamma, size_t batch_normalization_36_gamma_bytes, 
-	  void* batch_normalization_36_beta, size_t batch_normalization_36_beta_bytes, 
-	  void* batch_normalization_36_mean, size_t batch_normalization_36_mean_bytes, 
-	  void* batch_normalization_36_variance, size_t batch_normalization_36_variance_bytes, 
-	  void* depthwise_conv2d_13_w, size_t depthwise_conv2d_13_w_bytes, 
-	  void* batch_normalization_37_gamma, size_t batch_normalization_37_gamma_bytes, 
-	  void* batch_normalization_37_beta, size_t batch_normalization_37_beta_bytes, 
-	  void* batch_normalization_37_mean, size_t batch_normalization_37_mean_bytes, 
-	  void* batch_normalization_37_variance, size_t batch_normalization_37_variance_bytes, 
-	  void* conv2d_26_w, size_t conv2d_26_w_bytes, 
-	  void* batch_normalization_38_gamma, size_t batch_normalization_38_gamma_bytes, 
-	  void* batch_normalization_38_beta, size_t batch_normalization_38_beta_bytes, 
-	  void* batch_normalization_38_mean, size_t batch_normalization_38_mean_bytes, 
-	  void* batch_normalization_38_variance, size_t batch_normalization_38_variance_bytes, 
-	  void* conv2d_27_w, size_t conv2d_27_w_bytes, 
-	  void* batch_normalization_39_gamma, size_t batch_normalization_39_gamma_bytes, 
-	  void* batch_normalization_39_beta, size_t batch_normalization_39_beta_bytes, 
-	  void* batch_normalization_39_mean, size_t batch_normalization_39_mean_bytes, 
-	  void* batch_normalization_39_variance, size_t batch_normalization_39_variance_bytes, 
-	  void* depthwise_conv2d_14_w, size_t depthwise_conv2d_14_w_bytes, 
-	  void* batch_normalization_40_gamma, size_t batch_normalization_40_gamma_bytes, 
-	  void* batch_normalization_40_beta, size_t batch_normalization_40_beta_bytes, 
-	  void* batch_normalization_40_mean, size_t batch_normalization_40_mean_bytes, 
-	  void* batch_normalization_40_variance, size_t batch_normalization_40_variance_bytes, 
-	  void* conv2d_28_w, size_t conv2d_28_w_bytes, 
-	  void* batch_normalization_41_gamma, size_t batch_normalization_41_gamma_bytes, 
-	  void* batch_normalization_41_beta, size_t batch_normalization_41_beta_bytes, 
-	  void* batch_normalization_41_mean, size_t batch_normalization_41_mean_bytes, 
-	  void* batch_normalization_41_variance, size_t batch_normalization_41_variance_bytes, 
-	  void* conv2d_29_w, size_t conv2d_29_w_bytes, 
-	  void* batch_normalization_42_gamma, size_t batch_normalization_42_gamma_bytes, 
-	  void* batch_normalization_42_beta, size_t batch_normalization_42_beta_bytes, 
-	  void* batch_normalization_42_mean, size_t batch_normalization_42_mean_bytes, 
-	  void* batch_normalization_42_variance, size_t batch_normalization_42_variance_bytes, 
-	  void* depthwise_conv2d_15_w, size_t depthwise_conv2d_15_w_bytes, 
-	  void* batch_normalization_43_gamma, size_t batch_normalization_43_gamma_bytes, 
-	  void* batch_normalization_43_beta, size_t batch_normalization_43_beta_bytes, 
-	  void* batch_normalization_43_mean, size_t batch_normalization_43_mean_bytes, 
-	  void* batch_normalization_43_variance, size_t batch_normalization_43_variance_bytes, 
-	  void* conv2d_30_w, size_t conv2d_30_w_bytes, 
-	  void* batch_normalization_44_gamma, size_t batch_normalization_44_gamma_bytes, 
-	  void* batch_normalization_44_beta, size_t batch_normalization_44_beta_bytes, 
-	  void* batch_normalization_44_mean, size_t batch_normalization_44_mean_bytes, 
-	  void* batch_normalization_44_variance, size_t batch_normalization_44_variance_bytes, 
-	  void* conv2d_31_w, size_t conv2d_31_w_bytes, 
-	  void* batch_normalization_45_gamma, size_t batch_normalization_45_gamma_bytes, 
-	  void* batch_normalization_45_beta, size_t batch_normalization_45_beta_bytes, 
-	  void* batch_normalization_45_mean, size_t batch_normalization_45_mean_bytes, 
-	  void* batch_normalization_45_variance, size_t batch_normalization_45_variance_bytes, 
-	  void* depthwise_conv2d_16_w, size_t depthwise_conv2d_16_w_bytes, 
-	  void* batch_normalization_46_gamma, size_t batch_normalization_46_gamma_bytes, 
-	  void* batch_normalization_46_beta, size_t batch_normalization_46_beta_bytes, 
-	  void* batch_normalization_46_mean, size_t batch_normalization_46_mean_bytes, 
-	  void* batch_normalization_46_variance, size_t batch_normalization_46_variance_bytes, 
-	  void* conv2d_32_w, size_t conv2d_32_w_bytes, 
-	  void* batch_normalization_47_gamma, size_t batch_normalization_47_gamma_bytes, 
-	  void* batch_normalization_47_beta, size_t batch_normalization_47_beta_bytes, 
-	  void* batch_normalization_47_mean, size_t batch_normalization_47_mean_bytes, 
-	  void* batch_normalization_47_variance, size_t batch_normalization_47_variance_bytes, 
-	  void* conv2d_33_w, size_t conv2d_33_w_bytes, 
-	  void* batch_normalization_48_gamma, size_t batch_normalization_48_gamma_bytes, 
-	  void* batch_normalization_48_beta, size_t batch_normalization_48_beta_bytes, 
-	  void* batch_normalization_48_mean, size_t batch_normalization_48_mean_bytes, 
-	  void* batch_normalization_48_variance, size_t batch_normalization_48_variance_bytes, 
-	  void* depthwise_conv2d_17_w, size_t depthwise_conv2d_17_w_bytes, 
-	  void* batch_normalization_49_gamma, size_t batch_normalization_49_gamma_bytes, 
-	  void* batch_normalization_49_beta, size_t batch_normalization_49_beta_bytes, 
-	  void* batch_normalization_49_mean, size_t batch_normalization_49_mean_bytes, 
-	  void* batch_normalization_49_variance, size_t batch_normalization_49_variance_bytes, 
-	  void* conv2d_34_w, size_t conv2d_34_w_bytes, 
-	  void* batch_normalization_50_gamma, size_t batch_normalization_50_gamma_bytes, 
-	  void* batch_normalization_50_beta, size_t batch_normalization_50_beta_bytes, 
-	  void* batch_normalization_50_mean, size_t batch_normalization_50_mean_bytes, 
-	  void* batch_normalization_50_variance, size_t batch_normalization_50_variance_bytes, 
-	  void* conv2d_35_w, size_t conv2d_35_w_bytes, 
-	  void* batch_normalization_51_gamma, size_t batch_normalization_51_gamma_bytes, 
-	  void* batch_normalization_51_beta, size_t batch_normalization_51_beta_bytes, 
-	  void* batch_normalization_51_mean, size_t batch_normalization_51_mean_bytes, 
-	  void* batch_normalization_51_variance, size_t batch_normalization_51_variance_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(259, input, conv2d_1_w, depthwise_conv2d_1_w, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, conv2d_2_w, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, conv2d_3_w, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, depthwise_conv2d_2_w, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, conv2d_4_w, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, conv2d_5_w, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, depthwise_conv2d_3_w, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, conv2d_6_w, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, conv2d_7_w, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, depthwise_conv2d_4_w, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, conv2d_8_w, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, conv2d_9_w, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, depthwise_conv2d_5_w, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, conv2d_10_w, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, conv2d_11_w, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, depthwise_conv2d_6_w, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, conv2d_12_w, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, conv2d_13_w, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, depthwise_conv2d_7_w, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, conv2d_14_w, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, conv2d_15_w, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, depthwise_conv2d_8_w, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, conv2d_16_w, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, conv2d_17_w, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, depthwise_conv2d_9_w, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, conv2d_18_w, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, conv2d_19_w, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, depthwise_conv2d_10_w, batch_normalization_28_gamma, batch_normalization_28_beta, batch_normalization_28_mean, batch_normalization_28_variance, conv2d_20_w, batch_normalization_29_gamma, batch_normalization_29_beta, batch_normalization_29_mean, batch_normalization_29_variance, conv2d_21_w, batch_normalization_30_gamma, batch_normalization_30_beta, batch_normalization_30_mean, batch_normalization_30_variance, depthwise_conv2d_11_w, batch_normalization_31_gamma, batch_normalization_31_beta, batch_normalization_31_mean, batch_normalization_31_variance, conv2d_22_w, batch_normalization_32_gamma, batch_normalization_32_beta, batch_normalization_32_mean, batch_normalization_32_variance, conv2d_23_w, batch_normalization_33_gamma, batch_normalization_33_beta, batch_normalization_33_mean, batch_normalization_33_variance, depthwise_conv2d_12_w, batch_normalization_34_gamma, batch_normalization_34_beta, batch_normalization_34_mean, batch_normalization_34_variance, conv2d_24_w, batch_normalization_35_gamma, batch_normalization_35_beta, batch_normalization_35_mean, batch_normalization_35_variance, conv2d_25_w, batch_normalization_36_gamma, batch_normalization_36_beta, batch_normalization_36_mean, batch_normalization_36_variance, depthwise_conv2d_13_w, batch_normalization_37_gamma, batch_normalization_37_beta, batch_normalization_37_mean, batch_normalization_37_variance, conv2d_26_w, batch_normalization_38_gamma, batch_normalization_38_beta, batch_normalization_38_mean, batch_normalization_38_variance, conv2d_27_w, batch_normalization_39_gamma, batch_normalization_39_beta, batch_normalization_39_mean, batch_normalization_39_variance, depthwise_conv2d_14_w, batch_normalization_40_gamma, batch_normalization_40_beta, batch_normalization_40_mean, batch_normalization_40_variance, conv2d_28_w, batch_normalization_41_gamma, batch_normalization_41_beta, batch_normalization_41_mean, batch_normalization_41_variance, conv2d_29_w, batch_normalization_42_gamma, batch_normalization_42_beta, batch_normalization_42_mean, batch_normalization_42_variance, depthwise_conv2d_15_w, batch_normalization_43_gamma, batch_normalization_43_beta, batch_normalization_43_mean, batch_normalization_43_variance, conv2d_30_w, batch_normalization_44_gamma, batch_normalization_44_beta, batch_normalization_44_mean, batch_normalization_44_variance, conv2d_31_w, batch_normalization_45_gamma, batch_normalization_45_beta, batch_normalization_45_mean, batch_normalization_45_variance, depthwise_conv2d_16_w, batch_normalization_46_gamma, batch_normalization_46_beta, batch_normalization_46_mean, batch_normalization_46_variance, conv2d_32_w, batch_normalization_47_gamma, batch_normalization_47_beta, batch_normalization_47_mean, batch_normalization_47_variance, conv2d_33_w, batch_normalization_48_gamma, batch_normalization_48_beta, batch_normalization_48_mean, batch_normalization_48_variance, depthwise_conv2d_17_w, batch_normalization_49_gamma, batch_normalization_49_beta, batch_normalization_49_mean, batch_normalization_49_variance, conv2d_34_w, batch_normalization_50_gamma, batch_normalization_50_beta, batch_normalization_50_mean, batch_normalization_50_variance, conv2d_35_w, batch_normalization_51_gamma, batch_normalization_51_beta, batch_normalization_51_mean, batch_normalization_51_variance, dense_1_w, dense_1_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-  __visc__bindIn(var_2, 6, 2, 0); 
-  __visc__bindIn(var_2, 7, 3, 0); 
-  __visc__bindIn(var_2, 8, 4, 0); 
-  __visc__bindIn(var_2, 9, 5, 0); 
-  __visc__bindIn(var_2, 10, 6, 0); 
-  __visc__bindIn(var_2, 11, 7, 0); 
-  __visc__bindIn(var_2, 12, 8, 0); 
-  __visc__bindIn(var_2, 13, 9, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 14, 2, 0); 
-  __visc__bindIn(var_4, 15, 3, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-  __visc__bindIn(var_5, 16, 2, 0); 
-  __visc__bindIn(var_5, 17, 3, 0); 
-  __visc__bindIn(var_5, 18, 4, 0); 
-  __visc__bindIn(var_5, 19, 5, 0); 
-  __visc__bindIn(var_5, 20, 6, 0); 
-  __visc__bindIn(var_5, 21, 7, 0); 
-  __visc__bindIn(var_5, 22, 8, 0); 
-  __visc__bindIn(var_5, 23, 9, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-  __visc__bindIn(var_6, 24, 2, 0); 
-  __visc__bindIn(var_6, 25, 3, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 26, 2, 0); 
-  __visc__bindIn(var_7, 27, 3, 0); 
-  __visc__bindIn(var_7, 28, 4, 0); 
-  __visc__bindIn(var_7, 29, 5, 0); 
-  __visc__bindIn(var_7, 30, 6, 0); 
-  __visc__bindIn(var_7, 31, 7, 0); 
-  __visc__bindIn(var_7, 32, 8, 0); 
-  __visc__bindIn(var_7, 33, 9, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-  __visc__bindIn(var_9, 34, 2, 0); 
-  __visc__bindIn(var_9, 35, 3, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 36, 2, 0); 
-  __visc__bindIn(var_10, 37, 3, 0); 
-  __visc__bindIn(var_10, 38, 4, 0); 
-  __visc__bindIn(var_10, 39, 5, 0); 
-  __visc__bindIn(var_10, 40, 6, 0); 
-  __visc__bindIn(var_10, 41, 7, 0); 
-  __visc__bindIn(var_10, 42, 8, 0); 
-  __visc__bindIn(var_10, 43, 9, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-  __visc__bindIn(var_12, 44, 2, 0); 
-  __visc__bindIn(var_12, 45, 3, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-  __visc__bindIn(var_13, 46, 2, 0); 
-  __visc__bindIn(var_13, 47, 3, 0); 
-  __visc__bindIn(var_13, 48, 4, 0); 
-  __visc__bindIn(var_13, 49, 5, 0); 
-  __visc__bindIn(var_13, 50, 6, 0); 
-  __visc__bindIn(var_13, 51, 7, 0); 
-  __visc__bindIn(var_13, 52, 8, 0); 
-  __visc__bindIn(var_13, 53, 9, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-  __visc__bindIn(var_14, 54, 2, 0); 
-  __visc__bindIn(var_14, 55, 3, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 56, 2, 0); 
-  __visc__bindIn(var_15, 57, 3, 0); 
-  __visc__bindIn(var_15, 58, 4, 0); 
-  __visc__bindIn(var_15, 59, 5, 0); 
-  __visc__bindIn(var_15, 60, 6, 0); 
-  __visc__bindIn(var_15, 61, 7, 0); 
-  __visc__bindIn(var_15, 62, 8, 0); 
-  __visc__bindIn(var_15, 63, 9, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-  __visc__bindIn(var_17, 64, 2, 0); 
-  __visc__bindIn(var_17, 65, 3, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 66, 2, 0); 
-  __visc__bindIn(var_18, 67, 3, 0); 
-  __visc__bindIn(var_18, 68, 4, 0); 
-  __visc__bindIn(var_18, 69, 5, 0); 
-  __visc__bindIn(var_18, 70, 6, 0); 
-  __visc__bindIn(var_18, 71, 7, 0); 
-  __visc__bindIn(var_18, 72, 8, 0); 
-  __visc__bindIn(var_18, 73, 9, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-  __visc__bindIn(var_20, 74, 2, 0); 
-  __visc__bindIn(var_20, 75, 3, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 76, 2, 0); 
-  __visc__bindIn(var_21, 77, 3, 0); 
-  __visc__bindIn(var_21, 78, 4, 0); 
-  __visc__bindIn(var_21, 79, 5, 0); 
-  __visc__bindIn(var_21, 80, 6, 0); 
-  __visc__bindIn(var_21, 81, 7, 0); 
-  __visc__bindIn(var_21, 82, 8, 0); 
-  __visc__bindIn(var_21, 83, 9, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_13, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_22, 1, 1, 1, 0); 
-  __visc__edge(var_21, var_22, 1, 0, 2, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 3, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-  __visc__bindIn(var_23, 84, 2, 0); 
-  __visc__bindIn(var_23, 85, 3, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 86, 2, 0); 
-  __visc__bindIn(var_24, 87, 3, 0); 
-  __visc__bindIn(var_24, 88, 4, 0); 
-  __visc__bindIn(var_24, 89, 5, 0); 
-  __visc__bindIn(var_24, 90, 6, 0); 
-  __visc__bindIn(var_24, 91, 7, 0); 
-  __visc__bindIn(var_24, 92, 8, 0); 
-  __visc__bindIn(var_24, 93, 9, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-  __visc__bindIn(var_26, 94, 2, 0); 
-  __visc__bindIn(var_26, 95, 3, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 96, 2, 0); 
-  __visc__bindIn(var_27, 97, 3, 0); 
-  __visc__bindIn(var_27, 98, 4, 0); 
-  __visc__bindIn(var_27, 99, 5, 0); 
-  __visc__bindIn(var_27, 100, 6, 0); 
-  __visc__bindIn(var_27, 101, 7, 0); 
-  __visc__bindIn(var_27, 102, 8, 0); 
-  __visc__bindIn(var_27, 103, 9, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-  __visc__bindIn(var_29, 104, 2, 0); 
-  __visc__bindIn(var_29, 105, 3, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 106, 2, 0); 
-  __visc__bindIn(var_30, 107, 3, 0); 
-  __visc__bindIn(var_30, 108, 4, 0); 
-  __visc__bindIn(var_30, 109, 5, 0); 
-  __visc__bindIn(var_30, 110, 6, 0); 
-  __visc__bindIn(var_30, 111, 7, 0); 
-  __visc__bindIn(var_30, 112, 8, 0); 
-  __visc__bindIn(var_30, 113, 9, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 114, 2, 0); 
-  __visc__bindIn(var_31, 115, 3, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-  __visc__bindIn(var_32, 116, 2, 0); 
-  __visc__bindIn(var_32, 117, 3, 0); 
-  __visc__bindIn(var_32, 118, 4, 0); 
-  __visc__bindIn(var_32, 119, 5, 0); 
-  __visc__bindIn(var_32, 120, 6, 0); 
-  __visc__bindIn(var_32, 121, 7, 0); 
-  __visc__bindIn(var_32, 122, 8, 0); 
-  __visc__bindIn(var_32, 123, 9, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 124, 2, 0); 
-  __visc__bindIn(var_34, 125, 3, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-  __visc__bindIn(var_35, 126, 2, 0); 
-  __visc__bindIn(var_35, 127, 3, 0); 
-  __visc__bindIn(var_35, 128, 4, 0); 
-  __visc__bindIn(var_35, 129, 5, 0); 
-  __visc__bindIn(var_35, 130, 6, 0); 
-  __visc__bindIn(var_35, 131, 7, 0); 
-  __visc__bindIn(var_35, 132, 8, 0); 
-  __visc__bindIn(var_35, 133, 9, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 134, 2, 0); 
-  __visc__bindIn(var_37, 135, 3, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-  __visc__bindIn(var_38, 136, 2, 0); 
-  __visc__bindIn(var_38, 137, 3, 0); 
-  __visc__bindIn(var_38, 138, 4, 0); 
-  __visc__bindIn(var_38, 139, 5, 0); 
-  __visc__bindIn(var_38, 140, 6, 0); 
-  __visc__bindIn(var_38, 141, 7, 0); 
-  __visc__bindIn(var_38, 142, 8, 0); 
-  __visc__bindIn(var_38, 143, 9, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_30, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_39, 1, 1, 1, 0); 
-  __visc__edge(var_38, var_39, 1, 0, 2, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 3, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 144, 2, 0); 
-  __visc__bindIn(var_40, 145, 3, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-  __visc__bindIn(var_41, 146, 2, 0); 
-  __visc__bindIn(var_41, 147, 3, 0); 
-  __visc__bindIn(var_41, 148, 4, 0); 
-  __visc__bindIn(var_41, 149, 5, 0); 
-  __visc__bindIn(var_41, 150, 6, 0); 
-  __visc__bindIn(var_41, 151, 7, 0); 
-  __visc__bindIn(var_41, 152, 8, 0); 
-  __visc__bindIn(var_41, 153, 9, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-
-  void* var_43 = __visc__createNodeND(0, var_43_node); 
-
-  __visc__edge(var_42, var_43, 1, 0, 0, 0); 
-  __visc__edge(var_42, var_43, 1, 1, 1, 0); 
-  __visc__bindIn(var_43, 154, 2, 0); 
-  __visc__bindIn(var_43, 155, 3, 0); 
-
-  void* var_44 = __visc__createNodeND(0, var_44_node); 
-
-  __visc__edge(var_43, var_44, 1, 0, 0, 0); 
-  __visc__edge(var_43, var_44, 1, 1, 1, 0); 
-  __visc__bindIn(var_44, 156, 2, 0); 
-  __visc__bindIn(var_44, 157, 3, 0); 
-  __visc__bindIn(var_44, 158, 4, 0); 
-  __visc__bindIn(var_44, 159, 5, 0); 
-  __visc__bindIn(var_44, 160, 6, 0); 
-  __visc__bindIn(var_44, 161, 7, 0); 
-  __visc__bindIn(var_44, 162, 8, 0); 
-  __visc__bindIn(var_44, 163, 9, 0); 
-
-  void* var_45 = __visc__createNodeND(0, var_45_node); 
-
-  __visc__edge(var_44, var_45, 1, 0, 0, 0); 
-  __visc__edge(var_44, var_45, 1, 1, 1, 0); 
-
-  void* var_46 = __visc__createNodeND(0, var_46_node); 
-
-  __visc__edge(var_45, var_46, 1, 0, 0, 0); 
-  __visc__edge(var_45, var_46, 1, 1, 1, 0); 
-  __visc__bindIn(var_46, 164, 2, 0); 
-  __visc__bindIn(var_46, 165, 3, 0); 
-
-  void* var_47 = __visc__createNodeND(0, var_47_node); 
-
-  __visc__edge(var_46, var_47, 1, 0, 0, 0); 
-  __visc__edge(var_46, var_47, 1, 1, 1, 0); 
-  __visc__bindIn(var_47, 166, 2, 0); 
-  __visc__bindIn(var_47, 167, 3, 0); 
-  __visc__bindIn(var_47, 168, 4, 0); 
-  __visc__bindIn(var_47, 169, 5, 0); 
-  __visc__bindIn(var_47, 170, 6, 0); 
-  __visc__bindIn(var_47, 171, 7, 0); 
-  __visc__bindIn(var_47, 172, 8, 0); 
-  __visc__bindIn(var_47, 173, 9, 0); 
-
-  void* var_48 = __visc__createNodeND(0, var_48_node); 
-
-  __visc__edge(var_39, var_48, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_48, 1, 1, 1, 0); 
-  __visc__edge(var_47, var_48, 1, 0, 2, 0); 
-  __visc__edge(var_47, var_48, 1, 1, 3, 0); 
-
-  void* var_49 = __visc__createNodeND(0, var_49_node); 
-
-  __visc__edge(var_48, var_49, 1, 0, 0, 0); 
-  __visc__edge(var_48, var_49, 1, 1, 1, 0); 
-  __visc__bindIn(var_49, 174, 2, 0); 
-  __visc__bindIn(var_49, 175, 3, 0); 
-
-  void* var_50 = __visc__createNodeND(0, var_50_node); 
-
-  __visc__edge(var_49, var_50, 1, 0, 0, 0); 
-  __visc__edge(var_49, var_50, 1, 1, 1, 0); 
-  __visc__bindIn(var_50, 176, 2, 0); 
-  __visc__bindIn(var_50, 177, 3, 0); 
-  __visc__bindIn(var_50, 178, 4, 0); 
-  __visc__bindIn(var_50, 179, 5, 0); 
-  __visc__bindIn(var_50, 180, 6, 0); 
-  __visc__bindIn(var_50, 181, 7, 0); 
-  __visc__bindIn(var_50, 182, 8, 0); 
-  __visc__bindIn(var_50, 183, 9, 0); 
-
-  void* var_51 = __visc__createNodeND(0, var_51_node); 
-
-  __visc__edge(var_50, var_51, 1, 0, 0, 0); 
-  __visc__edge(var_50, var_51, 1, 1, 1, 0); 
-
-  void* var_52 = __visc__createNodeND(0, var_52_node); 
-
-  __visc__edge(var_51, var_52, 1, 0, 0, 0); 
-  __visc__edge(var_51, var_52, 1, 1, 1, 0); 
-  __visc__bindIn(var_52, 184, 2, 0); 
-  __visc__bindIn(var_52, 185, 3, 0); 
-
-  void* var_53 = __visc__createNodeND(0, var_53_node); 
-
-  __visc__edge(var_52, var_53, 1, 0, 0, 0); 
-  __visc__edge(var_52, var_53, 1, 1, 1, 0); 
-  __visc__bindIn(var_53, 186, 2, 0); 
-  __visc__bindIn(var_53, 187, 3, 0); 
-  __visc__bindIn(var_53, 188, 4, 0); 
-  __visc__bindIn(var_53, 189, 5, 0); 
-  __visc__bindIn(var_53, 190, 6, 0); 
-  __visc__bindIn(var_53, 191, 7, 0); 
-  __visc__bindIn(var_53, 192, 8, 0); 
-  __visc__bindIn(var_53, 193, 9, 0); 
-
-  void* var_54 = __visc__createNodeND(0, var_54_node); 
-
-  __visc__edge(var_53, var_54, 1, 0, 0, 0); 
-  __visc__edge(var_53, var_54, 1, 1, 1, 0); 
-
-  void* var_55 = __visc__createNodeND(0, var_55_node); 
-
-  __visc__edge(var_54, var_55, 1, 0, 0, 0); 
-  __visc__edge(var_54, var_55, 1, 1, 1, 0); 
-  __visc__bindIn(var_55, 194, 2, 0); 
-  __visc__bindIn(var_55, 195, 3, 0); 
-
-  void* var_56 = __visc__createNodeND(0, var_56_node); 
-
-  __visc__edge(var_55, var_56, 1, 0, 0, 0); 
-  __visc__edge(var_55, var_56, 1, 1, 1, 0); 
-  __visc__bindIn(var_56, 196, 2, 0); 
-  __visc__bindIn(var_56, 197, 3, 0); 
-  __visc__bindIn(var_56, 198, 4, 0); 
-  __visc__bindIn(var_56, 199, 5, 0); 
-  __visc__bindIn(var_56, 200, 6, 0); 
-  __visc__bindIn(var_56, 201, 7, 0); 
-  __visc__bindIn(var_56, 202, 8, 0); 
-  __visc__bindIn(var_56, 203, 9, 0); 
-
-  void* var_57 = __visc__createNodeND(0, var_57_node); 
-
-  __visc__edge(var_56, var_57, 1, 0, 0, 0); 
-  __visc__edge(var_56, var_57, 1, 1, 1, 0); 
-  __visc__bindIn(var_57, 204, 2, 0); 
-  __visc__bindIn(var_57, 205, 3, 0); 
-
-  void* var_58 = __visc__createNodeND(0, var_58_node); 
-
-  __visc__edge(var_57, var_58, 1, 0, 0, 0); 
-  __visc__edge(var_57, var_58, 1, 1, 1, 0); 
-  __visc__bindIn(var_58, 206, 2, 0); 
-  __visc__bindIn(var_58, 207, 3, 0); 
-  __visc__bindIn(var_58, 208, 4, 0); 
-  __visc__bindIn(var_58, 209, 5, 0); 
-  __visc__bindIn(var_58, 210, 6, 0); 
-  __visc__bindIn(var_58, 211, 7, 0); 
-  __visc__bindIn(var_58, 212, 8, 0); 
-  __visc__bindIn(var_58, 213, 9, 0); 
-
-  void* var_59 = __visc__createNodeND(0, var_59_node); 
-
-  __visc__edge(var_58, var_59, 1, 0, 0, 0); 
-  __visc__edge(var_58, var_59, 1, 1, 1, 0); 
-
-  void* var_60 = __visc__createNodeND(0, var_60_node); 
-
-  __visc__edge(var_59, var_60, 1, 0, 0, 0); 
-  __visc__edge(var_59, var_60, 1, 1, 1, 0); 
-  __visc__bindIn(var_60, 214, 2, 0); 
-  __visc__bindIn(var_60, 215, 3, 0); 
-
-  void* var_61 = __visc__createNodeND(0, var_61_node); 
-
-  __visc__edge(var_60, var_61, 1, 0, 0, 0); 
-  __visc__edge(var_60, var_61, 1, 1, 1, 0); 
-  __visc__bindIn(var_61, 216, 2, 0); 
-  __visc__bindIn(var_61, 217, 3, 0); 
-  __visc__bindIn(var_61, 218, 4, 0); 
-  __visc__bindIn(var_61, 219, 5, 0); 
-  __visc__bindIn(var_61, 220, 6, 0); 
-  __visc__bindIn(var_61, 221, 7, 0); 
-  __visc__bindIn(var_61, 222, 8, 0); 
-  __visc__bindIn(var_61, 223, 9, 0); 
-
-  void* var_62 = __visc__createNodeND(0, var_62_node); 
-
-  __visc__edge(var_61, var_62, 1, 0, 0, 0); 
-  __visc__edge(var_61, var_62, 1, 1, 1, 0); 
-
-  void* var_63 = __visc__createNodeND(0, var_63_node); 
-
-  __visc__edge(var_62, var_63, 1, 0, 0, 0); 
-  __visc__edge(var_62, var_63, 1, 1, 1, 0); 
-  __visc__bindIn(var_63, 224, 2, 0); 
-  __visc__bindIn(var_63, 225, 3, 0); 
-
-  void* var_64 = __visc__createNodeND(0, var_64_node); 
-
-  __visc__edge(var_63, var_64, 1, 0, 0, 0); 
-  __visc__edge(var_63, var_64, 1, 1, 1, 0); 
-  __visc__bindIn(var_64, 226, 2, 0); 
-  __visc__bindIn(var_64, 227, 3, 0); 
-  __visc__bindIn(var_64, 228, 4, 0); 
-  __visc__bindIn(var_64, 229, 5, 0); 
-  __visc__bindIn(var_64, 230, 6, 0); 
-  __visc__bindIn(var_64, 231, 7, 0); 
-  __visc__bindIn(var_64, 232, 8, 0); 
-  __visc__bindIn(var_64, 233, 9, 0); 
-
-  void* var_65 = __visc__createNodeND(0, var_65_node); 
-
-  __visc__edge(var_56, var_65, 1, 0, 0, 0); 
-  __visc__edge(var_56, var_65, 1, 1, 1, 0); 
-  __visc__edge(var_64, var_65, 1, 0, 2, 0); 
-  __visc__edge(var_64, var_65, 1, 1, 3, 0); 
-
-  void* var_66 = __visc__createNodeND(0, var_66_node); 
-
-  __visc__edge(var_65, var_66, 1, 0, 0, 0); 
-  __visc__edge(var_65, var_66, 1, 1, 1, 0); 
-  __visc__bindIn(var_66, 234, 2, 0); 
-  __visc__bindIn(var_66, 235, 3, 0); 
-
-  void* var_67 = __visc__createNodeND(0, var_67_node); 
-
-  __visc__edge(var_66, var_67, 1, 0, 0, 0); 
-  __visc__edge(var_66, var_67, 1, 1, 1, 0); 
-  __visc__bindIn(var_67, 236, 2, 0); 
-  __visc__bindIn(var_67, 237, 3, 0); 
-  __visc__bindIn(var_67, 238, 4, 0); 
-  __visc__bindIn(var_67, 239, 5, 0); 
-  __visc__bindIn(var_67, 240, 6, 0); 
-  __visc__bindIn(var_67, 241, 7, 0); 
-  __visc__bindIn(var_67, 242, 8, 0); 
-  __visc__bindIn(var_67, 243, 9, 0); 
-
-  void* var_68 = __visc__createNodeND(0, var_68_node); 
-
-  __visc__edge(var_67, var_68, 1, 0, 0, 0); 
-  __visc__edge(var_67, var_68, 1, 1, 1, 0); 
-
-  void* var_69 = __visc__createNodeND(0, var_69_node); 
-
-  __visc__edge(var_68, var_69, 1, 0, 0, 0); 
-  __visc__edge(var_68, var_69, 1, 1, 1, 0); 
-  __visc__bindIn(var_69, 244, 2, 0); 
-  __visc__bindIn(var_69, 245, 3, 0); 
-
-  void* var_70 = __visc__createNodeND(0, var_70_node); 
-
-  __visc__edge(var_69, var_70, 1, 0, 0, 0); 
-  __visc__edge(var_69, var_70, 1, 1, 1, 0); 
-  __visc__bindIn(var_70, 246, 2, 0); 
-  __visc__bindIn(var_70, 247, 3, 0); 
-  __visc__bindIn(var_70, 248, 4, 0); 
-  __visc__bindIn(var_70, 249, 5, 0); 
-  __visc__bindIn(var_70, 250, 6, 0); 
-  __visc__bindIn(var_70, 251, 7, 0); 
-  __visc__bindIn(var_70, 252, 8, 0); 
-  __visc__bindIn(var_70, 253, 9, 0); 
-
-  void* var_71 = __visc__createNodeND(0, var_71_node); 
-
-  __visc__edge(var_70, var_71, 1, 0, 0, 0); 
-  __visc__edge(var_70, var_71, 1, 1, 1, 0); 
-
-  void* var_72 = __visc__createNodeND(0, var_72_node); 
-
-  __visc__edge(var_71, var_72, 1, 0, 0, 0); 
-  __visc__edge(var_71, var_72, 1, 1, 1, 0); 
-  __visc__bindIn(var_72, 254, 2, 0); 
-  __visc__bindIn(var_72, 255, 3, 0); 
-
-  void* var_73 = __visc__createNodeND(0, var_73_node); 
-
-  __visc__edge(var_72, var_73, 1, 0, 0, 0); 
-  __visc__edge(var_72, var_73, 1, 1, 1, 0); 
-  __visc__bindIn(var_73, 256, 2, 0); 
-  __visc__bindIn(var_73, 257, 3, 0); 
-  __visc__bindIn(var_73, 258, 4, 0); 
-  __visc__bindIn(var_73, 259, 5, 0); 
-  __visc__bindIn(var_73, 260, 6, 0); 
-  __visc__bindIn(var_73, 261, 7, 0); 
-  __visc__bindIn(var_73, 262, 8, 0); 
-  __visc__bindIn(var_73, 263, 9, 0); 
-
-  void* var_74 = __visc__createNodeND(0, var_74_node); 
-
-  __visc__edge(var_65, var_74, 1, 0, 0, 0); 
-  __visc__edge(var_65, var_74, 1, 1, 1, 0); 
-  __visc__edge(var_73, var_74, 1, 0, 2, 0); 
-  __visc__edge(var_73, var_74, 1, 1, 3, 0); 
-
-  void* var_75 = __visc__createNodeND(0, var_75_node); 
-
-  __visc__edge(var_74, var_75, 1, 0, 0, 0); 
-  __visc__edge(var_74, var_75, 1, 1, 1, 0); 
-  __visc__bindIn(var_75, 264, 2, 0); 
-  __visc__bindIn(var_75, 265, 3, 0); 
-
-  void* var_76 = __visc__createNodeND(0, var_76_node); 
-
-  __visc__edge(var_75, var_76, 1, 0, 0, 0); 
-  __visc__edge(var_75, var_76, 1, 1, 1, 0); 
-  __visc__bindIn(var_76, 266, 2, 0); 
-  __visc__bindIn(var_76, 267, 3, 0); 
-  __visc__bindIn(var_76, 268, 4, 0); 
-  __visc__bindIn(var_76, 269, 5, 0); 
-  __visc__bindIn(var_76, 270, 6, 0); 
-  __visc__bindIn(var_76, 271, 7, 0); 
-  __visc__bindIn(var_76, 272, 8, 0); 
-  __visc__bindIn(var_76, 273, 9, 0); 
-
-  void* var_77 = __visc__createNodeND(0, var_77_node); 
-
-  __visc__edge(var_76, var_77, 1, 0, 0, 0); 
-  __visc__edge(var_76, var_77, 1, 1, 1, 0); 
-
-  void* var_78 = __visc__createNodeND(0, var_78_node); 
-
-  __visc__edge(var_77, var_78, 1, 0, 0, 0); 
-  __visc__edge(var_77, var_78, 1, 1, 1, 0); 
-  __visc__bindIn(var_78, 274, 2, 0); 
-  __visc__bindIn(var_78, 275, 3, 0); 
-
-  void* var_79 = __visc__createNodeND(0, var_79_node); 
-
-  __visc__edge(var_78, var_79, 1, 0, 0, 0); 
-  __visc__edge(var_78, var_79, 1, 1, 1, 0); 
-  __visc__bindIn(var_79, 276, 2, 0); 
-  __visc__bindIn(var_79, 277, 3, 0); 
-  __visc__bindIn(var_79, 278, 4, 0); 
-  __visc__bindIn(var_79, 279, 5, 0); 
-  __visc__bindIn(var_79, 280, 6, 0); 
-  __visc__bindIn(var_79, 281, 7, 0); 
-  __visc__bindIn(var_79, 282, 8, 0); 
-  __visc__bindIn(var_79, 283, 9, 0); 
-
-  void* var_80 = __visc__createNodeND(0, var_80_node); 
-
-  __visc__edge(var_79, var_80, 1, 0, 0, 0); 
-  __visc__edge(var_79, var_80, 1, 1, 1, 0); 
-
-  void* var_81 = __visc__createNodeND(0, var_81_node); 
-
-  __visc__edge(var_80, var_81, 1, 0, 0, 0); 
-  __visc__edge(var_80, var_81, 1, 1, 1, 0); 
-  __visc__bindIn(var_81, 284, 2, 0); 
-  __visc__bindIn(var_81, 285, 3, 0); 
-
-  void* var_82 = __visc__createNodeND(0, var_82_node); 
-
-  __visc__edge(var_81, var_82, 1, 0, 0, 0); 
-  __visc__edge(var_81, var_82, 1, 1, 1, 0); 
-  __visc__bindIn(var_82, 286, 2, 0); 
-  __visc__bindIn(var_82, 287, 3, 0); 
-  __visc__bindIn(var_82, 288, 4, 0); 
-  __visc__bindIn(var_82, 289, 5, 0); 
-  __visc__bindIn(var_82, 290, 6, 0); 
-  __visc__bindIn(var_82, 291, 7, 0); 
-  __visc__bindIn(var_82, 292, 8, 0); 
-  __visc__bindIn(var_82, 293, 9, 0); 
-
-  void* var_83 = __visc__createNodeND(0, var_83_node); 
-
-  __visc__edge(var_74, var_83, 1, 0, 0, 0); 
-  __visc__edge(var_74, var_83, 1, 1, 1, 0); 
-  __visc__edge(var_82, var_83, 1, 0, 2, 0); 
-  __visc__edge(var_82, var_83, 1, 1, 3, 0); 
-
-  void* var_84 = __visc__createNodeND(0, var_84_node); 
-
-  __visc__edge(var_83, var_84, 1, 0, 0, 0); 
-  __visc__edge(var_83, var_84, 1, 1, 1, 0); 
-  __visc__bindIn(var_84, 294, 2, 0); 
-  __visc__bindIn(var_84, 295, 3, 0); 
-
-  void* var_85 = __visc__createNodeND(0, var_85_node); 
-
-  __visc__edge(var_84, var_85, 1, 0, 0, 0); 
-  __visc__edge(var_84, var_85, 1, 1, 1, 0); 
-  __visc__bindIn(var_85, 296, 2, 0); 
-  __visc__bindIn(var_85, 297, 3, 0); 
-  __visc__bindIn(var_85, 298, 4, 0); 
-  __visc__bindIn(var_85, 299, 5, 0); 
-  __visc__bindIn(var_85, 300, 6, 0); 
-  __visc__bindIn(var_85, 301, 7, 0); 
-  __visc__bindIn(var_85, 302, 8, 0); 
-  __visc__bindIn(var_85, 303, 9, 0); 
-
-  void* var_86 = __visc__createNodeND(0, var_86_node); 
-
-  __visc__edge(var_85, var_86, 1, 0, 0, 0); 
-  __visc__edge(var_85, var_86, 1, 1, 1, 0); 
-
-  void* var_87 = __visc__createNodeND(0, var_87_node); 
-
-  __visc__edge(var_86, var_87, 1, 0, 0, 0); 
-  __visc__edge(var_86, var_87, 1, 1, 1, 0); 
-  __visc__bindIn(var_87, 304, 2, 0); 
-  __visc__bindIn(var_87, 305, 3, 0); 
-
-  void* var_88 = __visc__createNodeND(0, var_88_node); 
-
-  __visc__edge(var_87, var_88, 1, 0, 0, 0); 
-  __visc__edge(var_87, var_88, 1, 1, 1, 0); 
-  __visc__bindIn(var_88, 306, 2, 0); 
-  __visc__bindIn(var_88, 307, 3, 0); 
-  __visc__bindIn(var_88, 308, 4, 0); 
-  __visc__bindIn(var_88, 309, 5, 0); 
-  __visc__bindIn(var_88, 310, 6, 0); 
-  __visc__bindIn(var_88, 311, 7, 0); 
-  __visc__bindIn(var_88, 312, 8, 0); 
-  __visc__bindIn(var_88, 313, 9, 0); 
-
-  void* var_89 = __visc__createNodeND(0, var_89_node); 
-
-  __visc__edge(var_88, var_89, 1, 0, 0, 0); 
-  __visc__edge(var_88, var_89, 1, 1, 1, 0); 
-
-  void* var_90 = __visc__createNodeND(0, var_90_node); 
-
-  __visc__edge(var_89, var_90, 1, 0, 0, 0); 
-  __visc__edge(var_89, var_90, 1, 1, 1, 0); 
-  __visc__bindIn(var_90, 314, 2, 0); 
-  __visc__bindIn(var_90, 315, 3, 0); 
-
-  void* var_91 = __visc__createNodeND(0, var_91_node); 
-
-  __visc__edge(var_90, var_91, 1, 0, 0, 0); 
-  __visc__edge(var_90, var_91, 1, 1, 1, 0); 
-  __visc__bindIn(var_91, 316, 2, 0); 
-  __visc__bindIn(var_91, 317, 3, 0); 
-  __visc__bindIn(var_91, 318, 4, 0); 
-  __visc__bindIn(var_91, 319, 5, 0); 
-  __visc__bindIn(var_91, 320, 6, 0); 
-  __visc__bindIn(var_91, 321, 7, 0); 
-  __visc__bindIn(var_91, 322, 8, 0); 
-  __visc__bindIn(var_91, 323, 9, 0); 
-
-  void* var_92 = __visc__createNodeND(0, var_92_node); 
-
-  __visc__edge(var_91, var_92, 1, 0, 0, 0); 
-  __visc__edge(var_91, var_92, 1, 1, 1, 0); 
-  __visc__bindIn(var_92, 324, 2, 0); 
-  __visc__bindIn(var_92, 325, 3, 0); 
-
-  void* var_93 = __visc__createNodeND(0, var_93_node); 
-
-  __visc__edge(var_92, var_93, 1, 0, 0, 0); 
-  __visc__edge(var_92, var_93, 1, 1, 1, 0); 
-  __visc__bindIn(var_93, 326, 2, 0); 
-  __visc__bindIn(var_93, 327, 3, 0); 
-  __visc__bindIn(var_93, 328, 4, 0); 
-  __visc__bindIn(var_93, 329, 5, 0); 
-  __visc__bindIn(var_93, 330, 6, 0); 
-  __visc__bindIn(var_93, 331, 7, 0); 
-  __visc__bindIn(var_93, 332, 8, 0); 
-  __visc__bindIn(var_93, 333, 9, 0); 
-
-  void* var_94 = __visc__createNodeND(0, var_94_node); 
-
-  __visc__edge(var_93, var_94, 1, 0, 0, 0); 
-  __visc__edge(var_93, var_94, 1, 1, 1, 0); 
-
-  void* var_95 = __visc__createNodeND(0, var_95_node); 
-
-  __visc__edge(var_94, var_95, 1, 0, 0, 0); 
-  __visc__edge(var_94, var_95, 1, 1, 1, 0); 
-  __visc__bindIn(var_95, 334, 2, 0); 
-  __visc__bindIn(var_95, 335, 3, 0); 
-
-  void* var_96 = __visc__createNodeND(0, var_96_node); 
-
-  __visc__edge(var_95, var_96, 1, 0, 0, 0); 
-  __visc__edge(var_95, var_96, 1, 1, 1, 0); 
-  __visc__bindIn(var_96, 336, 2, 0); 
-  __visc__bindIn(var_96, 337, 3, 0); 
-  __visc__bindIn(var_96, 338, 4, 0); 
-  __visc__bindIn(var_96, 339, 5, 0); 
-  __visc__bindIn(var_96, 340, 6, 0); 
-  __visc__bindIn(var_96, 341, 7, 0); 
-  __visc__bindIn(var_96, 342, 8, 0); 
-  __visc__bindIn(var_96, 343, 9, 0); 
-
-  void* var_97 = __visc__createNodeND(0, var_97_node); 
-
-  __visc__edge(var_96, var_97, 1, 0, 0, 0); 
-  __visc__edge(var_96, var_97, 1, 1, 1, 0); 
-
-  void* var_98 = __visc__createNodeND(0, var_98_node); 
-
-  __visc__edge(var_97, var_98, 1, 0, 0, 0); 
-  __visc__edge(var_97, var_98, 1, 1, 1, 0); 
-  __visc__bindIn(var_98, 344, 2, 0); 
-  __visc__bindIn(var_98, 345, 3, 0); 
-
-  void* var_99 = __visc__createNodeND(0, var_99_node); 
-
-  __visc__edge(var_98, var_99, 1, 0, 0, 0); 
-  __visc__edge(var_98, var_99, 1, 1, 1, 0); 
-  __visc__bindIn(var_99, 346, 2, 0); 
-  __visc__bindIn(var_99, 347, 3, 0); 
-  __visc__bindIn(var_99, 348, 4, 0); 
-  __visc__bindIn(var_99, 349, 5, 0); 
-  __visc__bindIn(var_99, 350, 6, 0); 
-  __visc__bindIn(var_99, 351, 7, 0); 
-  __visc__bindIn(var_99, 352, 8, 0); 
-  __visc__bindIn(var_99, 353, 9, 0); 
-
-  void* var_100 = __visc__createNodeND(0, var_100_node); 
-
-  __visc__edge(var_91, var_100, 1, 0, 0, 0); 
-  __visc__edge(var_91, var_100, 1, 1, 1, 0); 
-  __visc__edge(var_99, var_100, 1, 0, 2, 0); 
-  __visc__edge(var_99, var_100, 1, 1, 3, 0); 
-
-  void* var_101 = __visc__createNodeND(0, var_101_node); 
-
-  __visc__edge(var_100, var_101, 1, 0, 0, 0); 
-  __visc__edge(var_100, var_101, 1, 1, 1, 0); 
-  __visc__bindIn(var_101, 354, 2, 0); 
-  __visc__bindIn(var_101, 355, 3, 0); 
-
-  void* var_102 = __visc__createNodeND(0, var_102_node); 
-
-  __visc__edge(var_101, var_102, 1, 0, 0, 0); 
-  __visc__edge(var_101, var_102, 1, 1, 1, 0); 
-  __visc__bindIn(var_102, 356, 2, 0); 
-  __visc__bindIn(var_102, 357, 3, 0); 
-  __visc__bindIn(var_102, 358, 4, 0); 
-  __visc__bindIn(var_102, 359, 5, 0); 
-  __visc__bindIn(var_102, 360, 6, 0); 
-  __visc__bindIn(var_102, 361, 7, 0); 
-  __visc__bindIn(var_102, 362, 8, 0); 
-  __visc__bindIn(var_102, 363, 9, 0); 
-
-  void* var_103 = __visc__createNodeND(0, var_103_node); 
-
-  __visc__edge(var_102, var_103, 1, 0, 0, 0); 
-  __visc__edge(var_102, var_103, 1, 1, 1, 0); 
-
-  void* var_104 = __visc__createNodeND(0, var_104_node); 
-
-  __visc__edge(var_103, var_104, 1, 0, 0, 0); 
-  __visc__edge(var_103, var_104, 1, 1, 1, 0); 
-  __visc__bindIn(var_104, 364, 2, 0); 
-  __visc__bindIn(var_104, 365, 3, 0); 
-
-  void* var_105 = __visc__createNodeND(0, var_105_node); 
-
-  __visc__edge(var_104, var_105, 1, 0, 0, 0); 
-  __visc__edge(var_104, var_105, 1, 1, 1, 0); 
-  __visc__bindIn(var_105, 366, 2, 0); 
-  __visc__bindIn(var_105, 367, 3, 0); 
-  __visc__bindIn(var_105, 368, 4, 0); 
-  __visc__bindIn(var_105, 369, 5, 0); 
-  __visc__bindIn(var_105, 370, 6, 0); 
-  __visc__bindIn(var_105, 371, 7, 0); 
-  __visc__bindIn(var_105, 372, 8, 0); 
-  __visc__bindIn(var_105, 373, 9, 0); 
-
-  void* var_106 = __visc__createNodeND(0, var_106_node); 
-
-  __visc__edge(var_105, var_106, 1, 0, 0, 0); 
-  __visc__edge(var_105, var_106, 1, 1, 1, 0); 
-
-  void* var_107 = __visc__createNodeND(0, var_107_node); 
-
-  __visc__edge(var_106, var_107, 1, 0, 0, 0); 
-  __visc__edge(var_106, var_107, 1, 1, 1, 0); 
-  __visc__bindIn(var_107, 374, 2, 0); 
-  __visc__bindIn(var_107, 375, 3, 0); 
-
-  void* var_108 = __visc__createNodeND(0, var_108_node); 
-
-  __visc__edge(var_107, var_108, 1, 0, 0, 0); 
-  __visc__edge(var_107, var_108, 1, 1, 1, 0); 
-  __visc__bindIn(var_108, 376, 2, 0); 
-  __visc__bindIn(var_108, 377, 3, 0); 
-  __visc__bindIn(var_108, 378, 4, 0); 
-  __visc__bindIn(var_108, 379, 5, 0); 
-  __visc__bindIn(var_108, 380, 6, 0); 
-  __visc__bindIn(var_108, 381, 7, 0); 
-  __visc__bindIn(var_108, 382, 8, 0); 
-  __visc__bindIn(var_108, 383, 9, 0); 
-
-  void* var_109 = __visc__createNodeND(0, var_109_node); 
-
-  __visc__edge(var_100, var_109, 1, 0, 0, 0); 
-  __visc__edge(var_100, var_109, 1, 1, 1, 0); 
-  __visc__edge(var_108, var_109, 1, 0, 2, 0); 
-  __visc__edge(var_108, var_109, 1, 1, 3, 0); 
-
-  void* var_110 = __visc__createNodeND(0, var_110_node); 
-
-  __visc__edge(var_109, var_110, 1, 0, 0, 0); 
-  __visc__edge(var_109, var_110, 1, 1, 1, 0); 
-  __visc__bindIn(var_110, 384, 2, 0); 
-  __visc__bindIn(var_110, 385, 3, 0); 
-
-  void* var_111 = __visc__createNodeND(0, var_111_node); 
-
-  __visc__edge(var_110, var_111, 1, 0, 0, 0); 
-  __visc__edge(var_110, var_111, 1, 1, 1, 0); 
-  __visc__bindIn(var_111, 386, 2, 0); 
-  __visc__bindIn(var_111, 387, 3, 0); 
-  __visc__bindIn(var_111, 388, 4, 0); 
-  __visc__bindIn(var_111, 389, 5, 0); 
-  __visc__bindIn(var_111, 390, 6, 0); 
-  __visc__bindIn(var_111, 391, 7, 0); 
-  __visc__bindIn(var_111, 392, 8, 0); 
-  __visc__bindIn(var_111, 393, 9, 0); 
-
-  void* var_112 = __visc__createNodeND(0, var_112_node); 
-
-  __visc__edge(var_111, var_112, 1, 0, 0, 0); 
-  __visc__edge(var_111, var_112, 1, 1, 1, 0); 
-
-  void* var_113 = __visc__createNodeND(0, var_113_node); 
-
-  __visc__edge(var_112, var_113, 1, 0, 0, 0); 
-  __visc__edge(var_112, var_113, 1, 1, 1, 0); 
-  __visc__bindIn(var_113, 394, 2, 0); 
-  __visc__bindIn(var_113, 395, 3, 0); 
-
-  void* var_114 = __visc__createNodeND(0, var_114_node); 
-
-  __visc__edge(var_113, var_114, 1, 0, 0, 0); 
-  __visc__edge(var_113, var_114, 1, 1, 1, 0); 
-  __visc__bindIn(var_114, 396, 2, 0); 
-  __visc__bindIn(var_114, 397, 3, 0); 
-  __visc__bindIn(var_114, 398, 4, 0); 
-  __visc__bindIn(var_114, 399, 5, 0); 
-  __visc__bindIn(var_114, 400, 6, 0); 
-  __visc__bindIn(var_114, 401, 7, 0); 
-  __visc__bindIn(var_114, 402, 8, 0); 
-  __visc__bindIn(var_114, 403, 9, 0); 
-
-  void* var_115 = __visc__createNodeND(0, var_115_node); 
-
-  __visc__edge(var_114, var_115, 1, 0, 0, 0); 
-  __visc__edge(var_114, var_115, 1, 1, 1, 0); 
-
-  void* var_116 = __visc__createNodeND(0, var_116_node); 
-
-  __visc__edge(var_115, var_116, 1, 0, 0, 0); 
-  __visc__edge(var_115, var_116, 1, 1, 1, 0); 
-  __visc__bindIn(var_116, 404, 2, 0); 
-  __visc__bindIn(var_116, 405, 3, 0); 
-
-  void* var_117 = __visc__createNodeND(0, var_117_node); 
-
-  __visc__edge(var_116, var_117, 1, 0, 0, 0); 
-  __visc__edge(var_116, var_117, 1, 1, 1, 0); 
-  __visc__bindIn(var_117, 406, 2, 0); 
-  __visc__bindIn(var_117, 407, 3, 0); 
-  __visc__bindIn(var_117, 408, 4, 0); 
-  __visc__bindIn(var_117, 409, 5, 0); 
-  __visc__bindIn(var_117, 410, 6, 0); 
-  __visc__bindIn(var_117, 411, 7, 0); 
-  __visc__bindIn(var_117, 412, 8, 0); 
-  __visc__bindIn(var_117, 413, 9, 0); 
-
-  void* var_118 = __visc__createNodeND(0, var_118_node); 
-
-  __visc__edge(var_117, var_118, 1, 0, 0, 0); 
-  __visc__edge(var_117, var_118, 1, 1, 1, 0); 
-  __visc__bindIn(var_118, 414, 2, 0); 
-  __visc__bindIn(var_118, 415, 3, 0); 
-
-  void* var_119 = __visc__createNodeND(0, var_119_node); 
-
-  __visc__edge(var_118, var_119, 1, 0, 0, 0); 
-  __visc__edge(var_118, var_119, 1, 1, 1, 0); 
-  __visc__bindIn(var_119, 416, 2, 0); 
-  __visc__bindIn(var_119, 417, 3, 0); 
-  __visc__bindIn(var_119, 418, 4, 0); 
-  __visc__bindIn(var_119, 419, 5, 0); 
-  __visc__bindIn(var_119, 420, 6, 0); 
-  __visc__bindIn(var_119, 421, 7, 0); 
-  __visc__bindIn(var_119, 422, 8, 0); 
-  __visc__bindIn(var_119, 423, 9, 0); 
-
-  void* var_120 = __visc__createNodeND(0, var_120_node); 
-
-  __visc__edge(var_119, var_120, 1, 0, 0, 0); 
-  __visc__edge(var_119, var_120, 1, 1, 1, 0); 
-
-  void* var_121 = __visc__createNodeND(0, var_121_node); 
-
-  __visc__edge(var_120, var_121, 1, 0, 0, 0); 
-  __visc__edge(var_120, var_121, 1, 1, 1, 0); 
-  __visc__bindIn(var_121, 424, 2, 0); 
-  __visc__bindIn(var_121, 425, 3, 0); 
-
-  void* var_122 = __visc__createNodeND(0, var_122_node); 
-
-  __visc__edge(var_121, var_122, 1, 0, 0, 0); 
-  __visc__edge(var_121, var_122, 1, 1, 1, 0); 
-  __visc__bindIn(var_122, 426, 2, 0); 
-  __visc__bindIn(var_122, 427, 3, 0); 
-  __visc__bindIn(var_122, 428, 4, 0); 
-  __visc__bindIn(var_122, 429, 5, 0); 
-  __visc__bindIn(var_122, 430, 6, 0); 
-  __visc__bindIn(var_122, 431, 7, 0); 
-  __visc__bindIn(var_122, 432, 8, 0); 
-  __visc__bindIn(var_122, 433, 9, 0); 
-
-  void* var_123 = __visc__createNodeND(0, var_123_node); 
-
-  __visc__edge(var_122, var_123, 1, 0, 0, 0); 
-  __visc__edge(var_122, var_123, 1, 1, 1, 0); 
-
-  void* var_124 = __visc__createNodeND(0, var_124_node); 
-
-  __visc__edge(var_123, var_124, 1, 0, 0, 0); 
-  __visc__edge(var_123, var_124, 1, 1, 1, 0); 
-  __visc__bindIn(var_124, 434, 2, 0); 
-  __visc__bindIn(var_124, 435, 3, 0); 
-
-  void* var_125 = __visc__createNodeND(0, var_125_node); 
-
-  __visc__edge(var_124, var_125, 1, 0, 0, 0); 
-  __visc__edge(var_124, var_125, 1, 1, 1, 0); 
-  __visc__bindIn(var_125, 436, 2, 0); 
-  __visc__bindIn(var_125, 437, 3, 0); 
-  __visc__bindIn(var_125, 438, 4, 0); 
-  __visc__bindIn(var_125, 439, 5, 0); 
-  __visc__bindIn(var_125, 440, 6, 0); 
-  __visc__bindIn(var_125, 441, 7, 0); 
-  __visc__bindIn(var_125, 442, 8, 0); 
-  __visc__bindIn(var_125, 443, 9, 0); 
-
-  void* var_126 = __visc__createNodeND(0, var_126_node); 
-
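-  // Fan-in node: var_126 receives the (tensor, bytes) pair from var_117 on
-  // inputs 0-1 and from var_125 on inputs 2-3, a residual-style merge of
-  // the two branches.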
-  __visc__edge(var_117, var_126, 1, 0, 0, 0); 
-  __visc__edge(var_117, var_126, 1, 1, 1, 0); 
-  __visc__edge(var_125, var_126, 1, 0, 2, 0); 
-  __visc__edge(var_125, var_126, 1, 1, 3, 0); 
-
-  void* var_127 = __visc__createNodeND(0, var_127_node); 
-
-  __visc__edge(var_126, var_127, 1, 0, 0, 0); 
-  __visc__edge(var_126, var_127, 1, 1, 1, 0); 
-  __visc__bindIn(var_127, 444, 2, 0); 
-  __visc__bindIn(var_127, 445, 3, 0); 
-
-  void* var_128 = __visc__createNodeND(0, var_128_node); 
-
-  __visc__edge(var_127, var_128, 1, 0, 0, 0); 
-  __visc__edge(var_127, var_128, 1, 1, 1, 0); 
-  __visc__bindIn(var_128, 446, 2, 0); 
-  __visc__bindIn(var_128, 447, 3, 0); 
-  __visc__bindIn(var_128, 448, 4, 0); 
-  __visc__bindIn(var_128, 449, 5, 0); 
-  __visc__bindIn(var_128, 450, 6, 0); 
-  __visc__bindIn(var_128, 451, 7, 0); 
-  __visc__bindIn(var_128, 452, 8, 0); 
-  __visc__bindIn(var_128, 453, 9, 0); 
-
-  void* var_129 = __visc__createNodeND(0, var_129_node); 
-
-  __visc__edge(var_128, var_129, 1, 0, 0, 0); 
-  __visc__edge(var_128, var_129, 1, 1, 1, 0); 
-
-  void* var_130 = __visc__createNodeND(0, var_130_node); 
-
-  __visc__edge(var_129, var_130, 1, 0, 0, 0); 
-  __visc__edge(var_129, var_130, 1, 1, 1, 0); 
-  __visc__bindIn(var_130, 454, 2, 0); 
-  __visc__bindIn(var_130, 455, 3, 0); 
-
-  void* var_131 = __visc__createNodeND(0, var_131_node); 
-
-  __visc__edge(var_130, var_131, 1, 0, 0, 0); 
-  __visc__edge(var_130, var_131, 1, 1, 1, 0); 
-  __visc__bindIn(var_131, 456, 2, 0); 
-  __visc__bindIn(var_131, 457, 3, 0); 
-  __visc__bindIn(var_131, 458, 4, 0); 
-  __visc__bindIn(var_131, 459, 5, 0); 
-  __visc__bindIn(var_131, 460, 6, 0); 
-  __visc__bindIn(var_131, 461, 7, 0); 
-  __visc__bindIn(var_131, 462, 8, 0); 
-  __visc__bindIn(var_131, 463, 9, 0); 
-
-  void* var_132 = __visc__createNodeND(0, var_132_node); 
-
-  __visc__edge(var_131, var_132, 1, 0, 0, 0); 
-  __visc__edge(var_131, var_132, 1, 1, 1, 0); 
-
-  void* var_133 = __visc__createNodeND(0, var_133_node); 
-
-  __visc__edge(var_132, var_133, 1, 0, 0, 0); 
-  __visc__edge(var_132, var_133, 1, 1, 1, 0); 
-  __visc__bindIn(var_133, 464, 2, 0); 
-  __visc__bindIn(var_133, 465, 3, 0); 
-
-  void* var_134 = __visc__createNodeND(0, var_134_node); 
-
-  __visc__edge(var_133, var_134, 1, 0, 0, 0); 
-  __visc__edge(var_133, var_134, 1, 1, 1, 0); 
-  __visc__bindIn(var_134, 466, 2, 0); 
-  __visc__bindIn(var_134, 467, 3, 0); 
-  __visc__bindIn(var_134, 468, 4, 0); 
-  __visc__bindIn(var_134, 469, 5, 0); 
-  __visc__bindIn(var_134, 470, 6, 0); 
-  __visc__bindIn(var_134, 471, 7, 0); 
-  __visc__bindIn(var_134, 472, 8, 0); 
-  __visc__bindIn(var_134, 473, 9, 0); 
-
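-  // Second fan-in: var_135 merges var_126 (inputs 0-1) with var_134
-  // (inputs 2-3), the next residual connection.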
-  void* var_135 = __visc__createNodeND(0, var_135_node); 
-
-  __visc__edge(var_126, var_135, 1, 0, 0, 0); 
-  __visc__edge(var_126, var_135, 1, 1, 1, 0); 
-  __visc__edge(var_134, var_135, 1, 0, 2, 0); 
-  __visc__edge(var_134, var_135, 1, 1, 3, 0); 
-
-  void* var_136 = __visc__createNodeND(0, var_136_node); 
-
-  __visc__edge(var_135, var_136, 1, 0, 0, 0); 
-  __visc__edge(var_135, var_136, 1, 1, 1, 0); 
-  __visc__bindIn(var_136, 474, 2, 0); 
-  __visc__bindIn(var_136, 475, 3, 0); 
-
-  void* var_137 = __visc__createNodeND(0, var_137_node); 
-
-  __visc__edge(var_136, var_137, 1, 0, 0, 0); 
-  __visc__edge(var_136, var_137, 1, 1, 1, 0); 
-  __visc__bindIn(var_137, 476, 2, 0); 
-  __visc__bindIn(var_137, 477, 3, 0); 
-  __visc__bindIn(var_137, 478, 4, 0); 
-  __visc__bindIn(var_137, 479, 5, 0); 
-  __visc__bindIn(var_137, 480, 6, 0); 
-  __visc__bindIn(var_137, 481, 7, 0); 
-  __visc__bindIn(var_137, 482, 8, 0); 
-  __visc__bindIn(var_137, 483, 9, 0); 
-
-  void* var_138 = __visc__createNodeND(0, var_138_node); 
-
-  __visc__edge(var_137, var_138, 1, 0, 0, 0); 
-  __visc__edge(var_137, var_138, 1, 1, 1, 0); 
-
-  void* var_139 = __visc__createNodeND(0, var_139_node); 
-
-  __visc__edge(var_138, var_139, 1, 0, 0, 0); 
-  __visc__edge(var_138, var_139, 1, 1, 1, 0); 
-  __visc__bindIn(var_139, 484, 2, 0); 
-  __visc__bindIn(var_139, 485, 3, 0); 
-
-  void* var_140 = __visc__createNodeND(0, var_140_node); 
-
-  __visc__edge(var_139, var_140, 1, 0, 0, 0); 
-  __visc__edge(var_139, var_140, 1, 1, 1, 0); 
-  __visc__bindIn(var_140, 486, 2, 0); 
-  __visc__bindIn(var_140, 487, 3, 0); 
-  __visc__bindIn(var_140, 488, 4, 0); 
-  __visc__bindIn(var_140, 489, 5, 0); 
-  __visc__bindIn(var_140, 490, 6, 0); 
-  __visc__bindIn(var_140, 491, 7, 0); 
-  __visc__bindIn(var_140, 492, 8, 0); 
-  __visc__bindIn(var_140, 493, 9, 0); 
-
-  void* var_141 = __visc__createNodeND(0, var_141_node); 
-
-  __visc__edge(var_140, var_141, 1, 0, 0, 0); 
-  __visc__edge(var_140, var_141, 1, 1, 1, 0); 
-
-  void* var_142 = __visc__createNodeND(0, var_142_node); 
-
-  __visc__edge(var_141, var_142, 1, 0, 0, 0); 
-  __visc__edge(var_141, var_142, 1, 1, 1, 0); 
-  __visc__bindIn(var_142, 494, 2, 0); 
-  __visc__bindIn(var_142, 495, 3, 0); 
-
-  void* var_143 = __visc__createNodeND(0, var_143_node); 
-
-  __visc__edge(var_142, var_143, 1, 0, 0, 0); 
-  __visc__edge(var_142, var_143, 1, 1, 1, 0); 
-  __visc__bindIn(var_143, 496, 2, 0); 
-  __visc__bindIn(var_143, 497, 3, 0); 
-  __visc__bindIn(var_143, 498, 4, 0); 
-  __visc__bindIn(var_143, 499, 5, 0); 
-  __visc__bindIn(var_143, 500, 6, 0); 
-  __visc__bindIn(var_143, 501, 7, 0); 
-  __visc__bindIn(var_143, 502, 8, 0); 
-  __visc__bindIn(var_143, 503, 9, 0); 
-
-  void* var_144 = __visc__createNodeND(0, var_144_node); 
-
-  __visc__edge(var_143, var_144, 1, 0, 0, 0); 
-  __visc__edge(var_143, var_144, 1, 1, 1, 0); 
-  __visc__bindIn(var_144, 504, 2, 0); 
-  __visc__bindIn(var_144, 505, 3, 0); 
-
-  void* var_145 = __visc__createNodeND(0, var_145_node); 
-
-  __visc__edge(var_144, var_145, 1, 0, 0, 0); 
-  __visc__edge(var_144, var_145, 1, 1, 1, 0); 
-  __visc__bindIn(var_145, 506, 2, 0); 
-  __visc__bindIn(var_145, 507, 3, 0); 
-  __visc__bindIn(var_145, 508, 4, 0); 
-  __visc__bindIn(var_145, 509, 5, 0); 
-  __visc__bindIn(var_145, 510, 6, 0); 
-  __visc__bindIn(var_145, 511, 7, 0); 
-  __visc__bindIn(var_145, 512, 8, 0); 
-  __visc__bindIn(var_145, 513, 9, 0); 
-
-  void* var_146 = __visc__createNodeND(0, var_146_node); 
-
-  __visc__edge(var_145, var_146, 1, 0, 0, 0); 
-  __visc__edge(var_145, var_146, 1, 1, 1, 0); 
-
-  void* var_147 = __visc__createNodeND(0, var_147_node); 
-
-  __visc__edge(var_146, var_147, 1, 0, 0, 0); 
-  __visc__edge(var_146, var_147, 1, 1, 1, 0); 
-
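-  // Graph tail: var_146, var_147, and var_150 take no root parameters,
-  // while var_148 and var_149 bind the last four root fields (indices
-  // 514-517), most likely dense_1_w and dense_1_b with their byte sizes.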
-  void* var_148 = __visc__createNodeND(0, var_148_node); 
-
-  __visc__edge(var_147, var_148, 1, 0, 0, 0); 
-  __visc__edge(var_147, var_148, 1, 1, 1, 0); 
-  __visc__bindIn(var_148, 514, 2, 0); 
-  __visc__bindIn(var_148, 515, 3, 0); 
-
-  void* var_149 = __visc__createNodeND(0, var_149_node); 
-
-  __visc__edge(var_148, var_149, 1, 0, 0, 0); 
-  __visc__edge(var_148, var_149, 1, 1, 1, 0); 
-  __visc__bindIn(var_149, 516, 2, 0); 
-  __visc__bindIn(var_149, 517, 3, 0); 
-
-  void* var_150 = __visc__createNodeND(0, var_150_node); 
-
-  __visc__edge(var_149, var_150, 1, 0, 0, 0); 
-  __visc__edge(var_149, var_150, 1, 1, 1, 0); 
-
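-  // Expose the last node's (tensor, bytes) outputs as the root graph outputs.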
-  __visc__bindOut(var_150, 0, 0, 0); 
-  __visc__bindOut(var_150, 1, 1, 0); 
-
-}
-
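-// Result record for the root graph: an output tensor pointer and its size.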
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
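-// Packed argument struct for the root node: a (pointer, bytes) pair for the
-// input and for every trained parameter, laid out in the same order as the
-// __visc__bindIn indices above, with the return slot last.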
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* depthwise_conv2d_1_w; 
-  size_t depthwise_conv2d_1_w_bytes; 
-  void* batch_normalization_1_gamma; 
-  size_t batch_normalization_1_gamma_bytes; 
-  void* batch_normalization_1_beta; 
-  size_t batch_normalization_1_beta_bytes; 
-  void* batch_normalization_1_mean; 
-  size_t batch_normalization_1_mean_bytes; 
-  void* batch_normalization_1_variance; 
-  size_t batch_normalization_1_variance_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* batch_normalization_2_gamma; 
-  size_t batch_normalization_2_gamma_bytes; 
-  void* batch_normalization_2_beta; 
-  size_t batch_normalization_2_beta_bytes; 
-  void* batch_normalization_2_mean; 
-  size_t batch_normalization_2_mean_bytes; 
-  void* batch_normalization_2_variance; 
-  size_t batch_normalization_2_variance_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* batch_normalization_3_gamma; 
-  size_t batch_normalization_3_gamma_bytes; 
-  void* batch_normalization_3_beta; 
-  size_t batch_normalization_3_beta_bytes; 
-  void* batch_normalization_3_mean; 
-  size_t batch_normalization_3_mean_bytes; 
-  void* batch_normalization_3_variance; 
-  size_t batch_normalization_3_variance_bytes; 
-  void* depthwise_conv2d_2_w; 
-  size_t depthwise_conv2d_2_w_bytes; 
-  void* batch_normalization_4_gamma; 
-  size_t batch_normalization_4_gamma_bytes; 
-  void* batch_normalization_4_beta; 
-  size_t batch_normalization_4_beta_bytes; 
-  void* batch_normalization_4_mean; 
-  size_t batch_normalization_4_mean_bytes; 
-  void* batch_normalization_4_variance; 
-  size_t batch_normalization_4_variance_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* batch_normalization_5_gamma; 
-  size_t batch_normalization_5_gamma_bytes; 
-  void* batch_normalization_5_beta; 
-  size_t batch_normalization_5_beta_bytes; 
-  void* batch_normalization_5_mean; 
-  size_t batch_normalization_5_mean_bytes; 
-  void* batch_normalization_5_variance; 
-  size_t batch_normalization_5_variance_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* batch_normalization_6_gamma; 
-  size_t batch_normalization_6_gamma_bytes; 
-  void* batch_normalization_6_beta; 
-  size_t batch_normalization_6_beta_bytes; 
-  void* batch_normalization_6_mean; 
-  size_t batch_normalization_6_mean_bytes; 
-  void* batch_normalization_6_variance; 
-  size_t batch_normalization_6_variance_bytes; 
-  void* depthwise_conv2d_3_w; 
-  size_t depthwise_conv2d_3_w_bytes; 
-  void* batch_normalization_7_gamma; 
-  size_t batch_normalization_7_gamma_bytes; 
-  void* batch_normalization_7_beta; 
-  size_t batch_normalization_7_beta_bytes; 
-  void* batch_normalization_7_mean; 
-  size_t batch_normalization_7_mean_bytes; 
-  void* batch_normalization_7_variance; 
-  size_t batch_normalization_7_variance_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* batch_normalization_8_gamma; 
-  size_t batch_normalization_8_gamma_bytes; 
-  void* batch_normalization_8_beta; 
-  size_t batch_normalization_8_beta_bytes; 
-  void* batch_normalization_8_mean; 
-  size_t batch_normalization_8_mean_bytes; 
-  void* batch_normalization_8_variance; 
-  size_t batch_normalization_8_variance_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* batch_normalization_9_gamma; 
-  size_t batch_normalization_9_gamma_bytes; 
-  void* batch_normalization_9_beta; 
-  size_t batch_normalization_9_beta_bytes; 
-  void* batch_normalization_9_mean; 
-  size_t batch_normalization_9_mean_bytes; 
-  void* batch_normalization_9_variance; 
-  size_t batch_normalization_9_variance_bytes; 
-  void* depthwise_conv2d_4_w; 
-  size_t depthwise_conv2d_4_w_bytes; 
-  void* batch_normalization_10_gamma; 
-  size_t batch_normalization_10_gamma_bytes; 
-  void* batch_normalization_10_beta; 
-  size_t batch_normalization_10_beta_bytes; 
-  void* batch_normalization_10_mean; 
-  size_t batch_normalization_10_mean_bytes; 
-  void* batch_normalization_10_variance; 
-  size_t batch_normalization_10_variance_bytes; 
-  void* conv2d_8_w; 
-  size_t conv2d_8_w_bytes; 
-  void* batch_normalization_11_gamma; 
-  size_t batch_normalization_11_gamma_bytes; 
-  void* batch_normalization_11_beta; 
-  size_t batch_normalization_11_beta_bytes; 
-  void* batch_normalization_11_mean; 
-  size_t batch_normalization_11_mean_bytes; 
-  void* batch_normalization_11_variance; 
-  size_t batch_normalization_11_variance_bytes; 
-  void* conv2d_9_w; 
-  size_t conv2d_9_w_bytes; 
-  void* batch_normalization_12_gamma; 
-  size_t batch_normalization_12_gamma_bytes; 
-  void* batch_normalization_12_beta; 
-  size_t batch_normalization_12_beta_bytes; 
-  void* batch_normalization_12_mean; 
-  size_t batch_normalization_12_mean_bytes; 
-  void* batch_normalization_12_variance; 
-  size_t batch_normalization_12_variance_bytes; 
-  void* depthwise_conv2d_5_w; 
-  size_t depthwise_conv2d_5_w_bytes; 
-  void* batch_normalization_13_gamma; 
-  size_t batch_normalization_13_gamma_bytes; 
-  void* batch_normalization_13_beta; 
-  size_t batch_normalization_13_beta_bytes; 
-  void* batch_normalization_13_mean; 
-  size_t batch_normalization_13_mean_bytes; 
-  void* batch_normalization_13_variance; 
-  size_t batch_normalization_13_variance_bytes; 
-  void* conv2d_10_w; 
-  size_t conv2d_10_w_bytes; 
-  void* batch_normalization_14_gamma; 
-  size_t batch_normalization_14_gamma_bytes; 
-  void* batch_normalization_14_beta; 
-  size_t batch_normalization_14_beta_bytes; 
-  void* batch_normalization_14_mean; 
-  size_t batch_normalization_14_mean_bytes; 
-  void* batch_normalization_14_variance; 
-  size_t batch_normalization_14_variance_bytes; 
-  void* conv2d_11_w; 
-  size_t conv2d_11_w_bytes; 
-  void* batch_normalization_15_gamma; 
-  size_t batch_normalization_15_gamma_bytes; 
-  void* batch_normalization_15_beta; 
-  size_t batch_normalization_15_beta_bytes; 
-  void* batch_normalization_15_mean; 
-  size_t batch_normalization_15_mean_bytes; 
-  void* batch_normalization_15_variance; 
-  size_t batch_normalization_15_variance_bytes; 
-  void* depthwise_conv2d_6_w; 
-  size_t depthwise_conv2d_6_w_bytes; 
-  void* batch_normalization_16_gamma; 
-  size_t batch_normalization_16_gamma_bytes; 
-  void* batch_normalization_16_beta; 
-  size_t batch_normalization_16_beta_bytes; 
-  void* batch_normalization_16_mean; 
-  size_t batch_normalization_16_mean_bytes; 
-  void* batch_normalization_16_variance; 
-  size_t batch_normalization_16_variance_bytes; 
-  void* conv2d_12_w; 
-  size_t conv2d_12_w_bytes; 
-  void* batch_normalization_17_gamma; 
-  size_t batch_normalization_17_gamma_bytes; 
-  void* batch_normalization_17_beta; 
-  size_t batch_normalization_17_beta_bytes; 
-  void* batch_normalization_17_mean; 
-  size_t batch_normalization_17_mean_bytes; 
-  void* batch_normalization_17_variance; 
-  size_t batch_normalization_17_variance_bytes; 
-  void* conv2d_13_w; 
-  size_t conv2d_13_w_bytes; 
-  void* batch_normalization_18_gamma; 
-  size_t batch_normalization_18_gamma_bytes; 
-  void* batch_normalization_18_beta; 
-  size_t batch_normalization_18_beta_bytes; 
-  void* batch_normalization_18_mean; 
-  size_t batch_normalization_18_mean_bytes; 
-  void* batch_normalization_18_variance; 
-  size_t batch_normalization_18_variance_bytes; 
-  void* depthwise_conv2d_7_w; 
-  size_t depthwise_conv2d_7_w_bytes; 
-  void* batch_normalization_19_gamma; 
-  size_t batch_normalization_19_gamma_bytes; 
-  void* batch_normalization_19_beta; 
-  size_t batch_normalization_19_beta_bytes; 
-  void* batch_normalization_19_mean; 
-  size_t batch_normalization_19_mean_bytes; 
-  void* batch_normalization_19_variance; 
-  size_t batch_normalization_19_variance_bytes; 
-  void* conv2d_14_w; 
-  size_t conv2d_14_w_bytes; 
-  void* batch_normalization_20_gamma; 
-  size_t batch_normalization_20_gamma_bytes; 
-  void* batch_normalization_20_beta; 
-  size_t batch_normalization_20_beta_bytes; 
-  void* batch_normalization_20_mean; 
-  size_t batch_normalization_20_mean_bytes; 
-  void* batch_normalization_20_variance; 
-  size_t batch_normalization_20_variance_bytes; 
-  void* conv2d_15_w; 
-  size_t conv2d_15_w_bytes; 
-  void* batch_normalization_21_gamma; 
-  size_t batch_normalization_21_gamma_bytes; 
-  void* batch_normalization_21_beta; 
-  size_t batch_normalization_21_beta_bytes; 
-  void* batch_normalization_21_mean; 
-  size_t batch_normalization_21_mean_bytes; 
-  void* batch_normalization_21_variance; 
-  size_t batch_normalization_21_variance_bytes; 
-  void* depthwise_conv2d_8_w; 
-  size_t depthwise_conv2d_8_w_bytes; 
-  void* batch_normalization_22_gamma; 
-  size_t batch_normalization_22_gamma_bytes; 
-  void* batch_normalization_22_beta; 
-  size_t batch_normalization_22_beta_bytes; 
-  void* batch_normalization_22_mean; 
-  size_t batch_normalization_22_mean_bytes; 
-  void* batch_normalization_22_variance; 
-  size_t batch_normalization_22_variance_bytes; 
-  void* conv2d_16_w; 
-  size_t conv2d_16_w_bytes; 
-  void* batch_normalization_23_gamma; 
-  size_t batch_normalization_23_gamma_bytes; 
-  void* batch_normalization_23_beta; 
-  size_t batch_normalization_23_beta_bytes; 
-  void* batch_normalization_23_mean; 
-  size_t batch_normalization_23_mean_bytes; 
-  void* batch_normalization_23_variance; 
-  size_t batch_normalization_23_variance_bytes; 
-  void* conv2d_17_w; 
-  size_t conv2d_17_w_bytes; 
-  void* batch_normalization_24_gamma; 
-  size_t batch_normalization_24_gamma_bytes; 
-  void* batch_normalization_24_beta; 
-  size_t batch_normalization_24_beta_bytes; 
-  void* batch_normalization_24_mean; 
-  size_t batch_normalization_24_mean_bytes; 
-  void* batch_normalization_24_variance; 
-  size_t batch_normalization_24_variance_bytes; 
-  void* depthwise_conv2d_9_w; 
-  size_t depthwise_conv2d_9_w_bytes; 
-  void* batch_normalization_25_gamma; 
-  size_t batch_normalization_25_gamma_bytes; 
-  void* batch_normalization_25_beta; 
-  size_t batch_normalization_25_beta_bytes; 
-  void* batch_normalization_25_mean; 
-  size_t batch_normalization_25_mean_bytes; 
-  void* batch_normalization_25_variance; 
-  size_t batch_normalization_25_variance_bytes; 
-  void* conv2d_18_w; 
-  size_t conv2d_18_w_bytes; 
-  void* batch_normalization_26_gamma; 
-  size_t batch_normalization_26_gamma_bytes; 
-  void* batch_normalization_26_beta; 
-  size_t batch_normalization_26_beta_bytes; 
-  void* batch_normalization_26_mean; 
-  size_t batch_normalization_26_mean_bytes; 
-  void* batch_normalization_26_variance; 
-  size_t batch_normalization_26_variance_bytes; 
-  void* conv2d_19_w; 
-  size_t conv2d_19_w_bytes; 
-  void* batch_normalization_27_gamma; 
-  size_t batch_normalization_27_gamma_bytes; 
-  void* batch_normalization_27_beta; 
-  size_t batch_normalization_27_beta_bytes; 
-  void* batch_normalization_27_mean; 
-  size_t batch_normalization_27_mean_bytes; 
-  void* batch_normalization_27_variance; 
-  size_t batch_normalization_27_variance_bytes; 
-  void* depthwise_conv2d_10_w; 
-  size_t depthwise_conv2d_10_w_bytes; 
-  void* batch_normalization_28_gamma; 
-  size_t batch_normalization_28_gamma_bytes; 
-  void* batch_normalization_28_beta; 
-  size_t batch_normalization_28_beta_bytes; 
-  void* batch_normalization_28_mean; 
-  size_t batch_normalization_28_mean_bytes; 
-  void* batch_normalization_28_variance; 
-  size_t batch_normalization_28_variance_bytes; 
-  void* conv2d_20_w; 
-  size_t conv2d_20_w_bytes; 
-  void* batch_normalization_29_gamma; 
-  size_t batch_normalization_29_gamma_bytes; 
-  void* batch_normalization_29_beta; 
-  size_t batch_normalization_29_beta_bytes; 
-  void* batch_normalization_29_mean; 
-  size_t batch_normalization_29_mean_bytes; 
-  void* batch_normalization_29_variance; 
-  size_t batch_normalization_29_variance_bytes; 
-  void* conv2d_21_w; 
-  size_t conv2d_21_w_bytes; 
-  void* batch_normalization_30_gamma; 
-  size_t batch_normalization_30_gamma_bytes; 
-  void* batch_normalization_30_beta; 
-  size_t batch_normalization_30_beta_bytes; 
-  void* batch_normalization_30_mean; 
-  size_t batch_normalization_30_mean_bytes; 
-  void* batch_normalization_30_variance; 
-  size_t batch_normalization_30_variance_bytes; 
-  void* depthwise_conv2d_11_w; 
-  size_t depthwise_conv2d_11_w_bytes; 
-  void* batch_normalization_31_gamma; 
-  size_t batch_normalization_31_gamma_bytes; 
-  void* batch_normalization_31_beta; 
-  size_t batch_normalization_31_beta_bytes; 
-  void* batch_normalization_31_mean; 
-  size_t batch_normalization_31_mean_bytes; 
-  void* batch_normalization_31_variance; 
-  size_t batch_normalization_31_variance_bytes; 
-  void* conv2d_22_w; 
-  size_t conv2d_22_w_bytes; 
-  void* batch_normalization_32_gamma; 
-  size_t batch_normalization_32_gamma_bytes; 
-  void* batch_normalization_32_beta; 
-  size_t batch_normalization_32_beta_bytes; 
-  void* batch_normalization_32_mean; 
-  size_t batch_normalization_32_mean_bytes; 
-  void* batch_normalization_32_variance; 
-  size_t batch_normalization_32_variance_bytes; 
-  void* conv2d_23_w; 
-  size_t conv2d_23_w_bytes; 
-  void* batch_normalization_33_gamma; 
-  size_t batch_normalization_33_gamma_bytes; 
-  void* batch_normalization_33_beta; 
-  size_t batch_normalization_33_beta_bytes; 
-  void* batch_normalization_33_mean; 
-  size_t batch_normalization_33_mean_bytes; 
-  void* batch_normalization_33_variance; 
-  size_t batch_normalization_33_variance_bytes; 
-  void* depthwise_conv2d_12_w; 
-  size_t depthwise_conv2d_12_w_bytes; 
-  void* batch_normalization_34_gamma; 
-  size_t batch_normalization_34_gamma_bytes; 
-  void* batch_normalization_34_beta; 
-  size_t batch_normalization_34_beta_bytes; 
-  void* batch_normalization_34_mean; 
-  size_t batch_normalization_34_mean_bytes; 
-  void* batch_normalization_34_variance; 
-  size_t batch_normalization_34_variance_bytes; 
-  void* conv2d_24_w; 
-  size_t conv2d_24_w_bytes; 
-  void* batch_normalization_35_gamma; 
-  size_t batch_normalization_35_gamma_bytes; 
-  void* batch_normalization_35_beta; 
-  size_t batch_normalization_35_beta_bytes; 
-  void* batch_normalization_35_mean; 
-  size_t batch_normalization_35_mean_bytes; 
-  void* batch_normalization_35_variance; 
-  size_t batch_normalization_35_variance_bytes; 
-  void* conv2d_25_w; 
-  size_t conv2d_25_w_bytes; 
-  void* batch_normalization_36_gamma; 
-  size_t batch_normalization_36_gamma_bytes; 
-  void* batch_normalization_36_beta; 
-  size_t batch_normalization_36_beta_bytes; 
-  void* batch_normalization_36_mean; 
-  size_t batch_normalization_36_mean_bytes; 
-  void* batch_normalization_36_variance; 
-  size_t batch_normalization_36_variance_bytes; 
-  void* depthwise_conv2d_13_w; 
-  size_t depthwise_conv2d_13_w_bytes; 
-  void* batch_normalization_37_gamma; 
-  size_t batch_normalization_37_gamma_bytes; 
-  void* batch_normalization_37_beta; 
-  size_t batch_normalization_37_beta_bytes; 
-  void* batch_normalization_37_mean; 
-  size_t batch_normalization_37_mean_bytes; 
-  void* batch_normalization_37_variance; 
-  size_t batch_normalization_37_variance_bytes; 
-  void* conv2d_26_w; 
-  size_t conv2d_26_w_bytes; 
-  void* batch_normalization_38_gamma; 
-  size_t batch_normalization_38_gamma_bytes; 
-  void* batch_normalization_38_beta; 
-  size_t batch_normalization_38_beta_bytes; 
-  void* batch_normalization_38_mean; 
-  size_t batch_normalization_38_mean_bytes; 
-  void* batch_normalization_38_variance; 
-  size_t batch_normalization_38_variance_bytes; 
-  void* conv2d_27_w; 
-  size_t conv2d_27_w_bytes; 
-  void* batch_normalization_39_gamma; 
-  size_t batch_normalization_39_gamma_bytes; 
-  void* batch_normalization_39_beta; 
-  size_t batch_normalization_39_beta_bytes; 
-  void* batch_normalization_39_mean; 
-  size_t batch_normalization_39_mean_bytes; 
-  void* batch_normalization_39_variance; 
-  size_t batch_normalization_39_variance_bytes; 
-  void* depthwise_conv2d_14_w; 
-  size_t depthwise_conv2d_14_w_bytes; 
-  void* batch_normalization_40_gamma; 
-  size_t batch_normalization_40_gamma_bytes; 
-  void* batch_normalization_40_beta; 
-  size_t batch_normalization_40_beta_bytes; 
-  void* batch_normalization_40_mean; 
-  size_t batch_normalization_40_mean_bytes; 
-  void* batch_normalization_40_variance; 
-  size_t batch_normalization_40_variance_bytes; 
-  void* conv2d_28_w; 
-  size_t conv2d_28_w_bytes; 
-  void* batch_normalization_41_gamma; 
-  size_t batch_normalization_41_gamma_bytes; 
-  void* batch_normalization_41_beta; 
-  size_t batch_normalization_41_beta_bytes; 
-  void* batch_normalization_41_mean; 
-  size_t batch_normalization_41_mean_bytes; 
-  void* batch_normalization_41_variance; 
-  size_t batch_normalization_41_variance_bytes; 
-  void* conv2d_29_w; 
-  size_t conv2d_29_w_bytes; 
-  void* batch_normalization_42_gamma; 
-  size_t batch_normalization_42_gamma_bytes; 
-  void* batch_normalization_42_beta; 
-  size_t batch_normalization_42_beta_bytes; 
-  void* batch_normalization_42_mean; 
-  size_t batch_normalization_42_mean_bytes; 
-  void* batch_normalization_42_variance; 
-  size_t batch_normalization_42_variance_bytes; 
-  void* depthwise_conv2d_15_w; 
-  size_t depthwise_conv2d_15_w_bytes; 
-  void* batch_normalization_43_gamma; 
-  size_t batch_normalization_43_gamma_bytes; 
-  void* batch_normalization_43_beta; 
-  size_t batch_normalization_43_beta_bytes; 
-  void* batch_normalization_43_mean; 
-  size_t batch_normalization_43_mean_bytes; 
-  void* batch_normalization_43_variance; 
-  size_t batch_normalization_43_variance_bytes; 
-  void* conv2d_30_w; 
-  size_t conv2d_30_w_bytes; 
-  void* batch_normalization_44_gamma; 
-  size_t batch_normalization_44_gamma_bytes; 
-  void* batch_normalization_44_beta; 
-  size_t batch_normalization_44_beta_bytes; 
-  void* batch_normalization_44_mean; 
-  size_t batch_normalization_44_mean_bytes; 
-  void* batch_normalization_44_variance; 
-  size_t batch_normalization_44_variance_bytes; 
-  void* conv2d_31_w; 
-  size_t conv2d_31_w_bytes; 
-  void* batch_normalization_45_gamma; 
-  size_t batch_normalization_45_gamma_bytes; 
-  void* batch_normalization_45_beta; 
-  size_t batch_normalization_45_beta_bytes; 
-  void* batch_normalization_45_mean; 
-  size_t batch_normalization_45_mean_bytes; 
-  void* batch_normalization_45_variance; 
-  size_t batch_normalization_45_variance_bytes; 
-  void* depthwise_conv2d_16_w; 
-  size_t depthwise_conv2d_16_w_bytes; 
-  void* batch_normalization_46_gamma; 
-  size_t batch_normalization_46_gamma_bytes; 
-  void* batch_normalization_46_beta; 
-  size_t batch_normalization_46_beta_bytes; 
-  void* batch_normalization_46_mean; 
-  size_t batch_normalization_46_mean_bytes; 
-  void* batch_normalization_46_variance; 
-  size_t batch_normalization_46_variance_bytes; 
-  void* conv2d_32_w; 
-  size_t conv2d_32_w_bytes; 
-  void* batch_normalization_47_gamma; 
-  size_t batch_normalization_47_gamma_bytes; 
-  void* batch_normalization_47_beta; 
-  size_t batch_normalization_47_beta_bytes; 
-  void* batch_normalization_47_mean; 
-  size_t batch_normalization_47_mean_bytes; 
-  void* batch_normalization_47_variance; 
-  size_t batch_normalization_47_variance_bytes; 
-  void* conv2d_33_w; 
-  size_t conv2d_33_w_bytes; 
-  void* batch_normalization_48_gamma; 
-  size_t batch_normalization_48_gamma_bytes; 
-  void* batch_normalization_48_beta; 
-  size_t batch_normalization_48_beta_bytes; 
-  void* batch_normalization_48_mean; 
-  size_t batch_normalization_48_mean_bytes; 
-  void* batch_normalization_48_variance; 
-  size_t batch_normalization_48_variance_bytes; 
-  void* depthwise_conv2d_17_w; 
-  size_t depthwise_conv2d_17_w_bytes; 
-  void* batch_normalization_49_gamma; 
-  size_t batch_normalization_49_gamma_bytes; 
-  void* batch_normalization_49_beta; 
-  size_t batch_normalization_49_beta_bytes; 
-  void* batch_normalization_49_mean; 
-  size_t batch_normalization_49_mean_bytes; 
-  void* batch_normalization_49_variance; 
-  size_t batch_normalization_49_variance_bytes; 
-  void* conv2d_34_w; 
-  size_t conv2d_34_w_bytes; 
-  void* batch_normalization_50_gamma; 
-  size_t batch_normalization_50_gamma_bytes; 
-  void* batch_normalization_50_beta; 
-  size_t batch_normalization_50_beta_bytes; 
-  void* batch_normalization_50_mean; 
-  size_t batch_normalization_50_mean_bytes; 
-  void* batch_normalization_50_variance; 
-  size_t batch_normalization_50_variance_bytes; 
-  void* conv2d_35_w; 
-  size_t conv2d_35_w_bytes; 
-  void* batch_normalization_51_gamma; 
-  size_t batch_normalization_51_gamma_bytes; 
-  void* batch_normalization_51_beta; 
-  size_t batch_normalization_51_beta_bytes; 
-  void* batch_normalization_51_mean; 
-  size_t batch_normalization_51_mean_bytes; 
-  void* batch_normalization_51_variance; 
-  size_t batch_normalization_51_variance_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-
-  struct ret_t r; 
-} RootIn;
-
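-// Host driver: reads the input metadata and every trained tensor of the
-// quantized MobileNetV2 from data/mobilenetv2_quant/.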
-int main(){ 
-
-std::string dir_prefix = std::string("data/mobilenetv2_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
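-// Each parameter below follows the same two-line pattern: build the .bin
-// path, then load it with readTrainedWeights(path, 0, N, C, H, W); the four
-// trailing arguments give the tensor dimensions.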
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,32,1,1); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,96,16,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,96,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,96,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,24,96,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,24,144,1,1); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,144,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,64,192,1,1); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_28_gamma_path =  dir_prefix + std::string("batch_normalization_28_gamma.bin"); 
-void* batch_normalization_28_gamma =  readTrainedWeights(batch_normalization_28_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_beta_path =  dir_prefix + std::string("batch_normalization_28_beta.bin"); 
-void* batch_normalization_28_beta =  readTrainedWeights(batch_normalization_28_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_mean_path =  dir_prefix + std::string("batch_normalization_28_mean.bin"); 
-void* batch_normalization_28_mean =  readTrainedWeights(batch_normalization_28_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_variance_path =  dir_prefix + std::string("batch_normalization_28_variance.bin"); 
-void* batch_normalization_28_variance =  readTrainedWeights(batch_normalization_28_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_29_gamma_path =  dir_prefix + std::string("batch_normalization_29_gamma.bin"); 
-void* batch_normalization_29_gamma =  readTrainedWeights(batch_normalization_29_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_beta_path =  dir_prefix + std::string("batch_normalization_29_beta.bin"); 
-void* batch_normalization_29_beta =  readTrainedWeights(batch_normalization_29_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_mean_path =  dir_prefix + std::string("batch_normalization_29_mean.bin"); 
-void* batch_normalization_29_mean =  readTrainedWeights(batch_normalization_29_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_variance_path =  dir_prefix + std::string("batch_normalization_29_variance.bin"); 
-void* batch_normalization_29_variance =  readTrainedWeights(batch_normalization_29_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_30_gamma_path =  dir_prefix + std::string("batch_normalization_30_gamma.bin"); 
-void* batch_normalization_30_gamma =  readTrainedWeights(batch_normalization_30_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_beta_path =  dir_prefix + std::string("batch_normalization_30_beta.bin"); 
-void* batch_normalization_30_beta =  readTrainedWeights(batch_normalization_30_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_mean_path =  dir_prefix + std::string("batch_normalization_30_mean.bin"); 
-void* batch_normalization_30_mean =  readTrainedWeights(batch_normalization_30_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_variance_path =  dir_prefix + std::string("batch_normalization_30_variance.bin"); 
-void* batch_normalization_30_variance =  readTrainedWeights(batch_normalization_30_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_31_gamma_path =  dir_prefix + std::string("batch_normalization_31_gamma.bin"); 
-void* batch_normalization_31_gamma =  readTrainedWeights(batch_normalization_31_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_beta_path =  dir_prefix + std::string("batch_normalization_31_beta.bin"); 
-void* batch_normalization_31_beta =  readTrainedWeights(batch_normalization_31_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_mean_path =  dir_prefix + std::string("batch_normalization_31_mean.bin"); 
-void* batch_normalization_31_mean =  readTrainedWeights(batch_normalization_31_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_variance_path =  dir_prefix + std::string("batch_normalization_31_variance.bin"); 
-void* batch_normalization_31_variance =  readTrainedWeights(batch_normalization_31_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_22_w_path =  dir_prefix + std::string("conv2d_22_w.bin"); 
-void* conv2d_22_w =  readTrainedWeights(conv2d_22_w_path.c_str(), 0,96,384,1,1); 
-std::string batch_normalization_32_gamma_path =  dir_prefix + std::string("batch_normalization_32_gamma.bin"); 
-void* batch_normalization_32_gamma =  readTrainedWeights(batch_normalization_32_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_beta_path =  dir_prefix + std::string("batch_normalization_32_beta.bin"); 
-void* batch_normalization_32_beta =  readTrainedWeights(batch_normalization_32_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_mean_path =  dir_prefix + std::string("batch_normalization_32_mean.bin"); 
-void* batch_normalization_32_mean =  readTrainedWeights(batch_normalization_32_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_variance_path =  dir_prefix + std::string("batch_normalization_32_variance.bin"); 
-void* batch_normalization_32_variance =  readTrainedWeights(batch_normalization_32_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_23_w_path =  dir_prefix + std::string("conv2d_23_w.bin"); 
-void* conv2d_23_w =  readTrainedWeights(conv2d_23_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_33_gamma_path =  dir_prefix + std::string("batch_normalization_33_gamma.bin"); 
-void* batch_normalization_33_gamma =  readTrainedWeights(batch_normalization_33_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_beta_path =  dir_prefix + std::string("batch_normalization_33_beta.bin"); 
-void* batch_normalization_33_beta =  readTrainedWeights(batch_normalization_33_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_mean_path =  dir_prefix + std::string("batch_normalization_33_mean.bin"); 
-void* batch_normalization_33_mean =  readTrainedWeights(batch_normalization_33_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_variance_path =  dir_prefix + std::string("batch_normalization_33_variance.bin"); 
-void* batch_normalization_33_variance =  readTrainedWeights(batch_normalization_33_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_34_gamma_path =  dir_prefix + std::string("batch_normalization_34_gamma.bin"); 
-void* batch_normalization_34_gamma =  readTrainedWeights(batch_normalization_34_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_beta_path =  dir_prefix + std::string("batch_normalization_34_beta.bin"); 
-void* batch_normalization_34_beta =  readTrainedWeights(batch_normalization_34_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_mean_path =  dir_prefix + std::string("batch_normalization_34_mean.bin"); 
-void* batch_normalization_34_mean =  readTrainedWeights(batch_normalization_34_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_variance_path =  dir_prefix + std::string("batch_normalization_34_variance.bin"); 
-void* batch_normalization_34_variance =  readTrainedWeights(batch_normalization_34_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_24_w_path =  dir_prefix + std::string("conv2d_24_w.bin"); 
-void* conv2d_24_w =  readTrainedWeights(conv2d_24_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_35_gamma_path =  dir_prefix + std::string("batch_normalization_35_gamma.bin"); 
-void* batch_normalization_35_gamma =  readTrainedWeights(batch_normalization_35_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_beta_path =  dir_prefix + std::string("batch_normalization_35_beta.bin"); 
-void* batch_normalization_35_beta =  readTrainedWeights(batch_normalization_35_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_mean_path =  dir_prefix + std::string("batch_normalization_35_mean.bin"); 
-void* batch_normalization_35_mean =  readTrainedWeights(batch_normalization_35_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_variance_path =  dir_prefix + std::string("batch_normalization_35_variance.bin"); 
-void* batch_normalization_35_variance =  readTrainedWeights(batch_normalization_35_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_25_w_path =  dir_prefix + std::string("conv2d_25_w.bin"); 
-void* conv2d_25_w =  readTrainedWeights(conv2d_25_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_36_gamma_path =  dir_prefix + std::string("batch_normalization_36_gamma.bin"); 
-void* batch_normalization_36_gamma =  readTrainedWeights(batch_normalization_36_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_beta_path =  dir_prefix + std::string("batch_normalization_36_beta.bin"); 
-void* batch_normalization_36_beta =  readTrainedWeights(batch_normalization_36_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_mean_path =  dir_prefix + std::string("batch_normalization_36_mean.bin"); 
-void* batch_normalization_36_mean =  readTrainedWeights(batch_normalization_36_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_variance_path =  dir_prefix + std::string("batch_normalization_36_variance.bin"); 
-void* batch_normalization_36_variance =  readTrainedWeights(batch_normalization_36_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_37_gamma_path =  dir_prefix + std::string("batch_normalization_37_gamma.bin"); 
-void* batch_normalization_37_gamma =  readTrainedWeights(batch_normalization_37_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_beta_path =  dir_prefix + std::string("batch_normalization_37_beta.bin"); 
-void* batch_normalization_37_beta =  readTrainedWeights(batch_normalization_37_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_mean_path =  dir_prefix + std::string("batch_normalization_37_mean.bin"); 
-void* batch_normalization_37_mean =  readTrainedWeights(batch_normalization_37_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_variance_path =  dir_prefix + std::string("batch_normalization_37_variance.bin"); 
-void* batch_normalization_37_variance =  readTrainedWeights(batch_normalization_37_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_26_w_path =  dir_prefix + std::string("conv2d_26_w.bin"); 
-void* conv2d_26_w =  readTrainedWeights(conv2d_26_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_38_gamma_path =  dir_prefix + std::string("batch_normalization_38_gamma.bin"); 
-void* batch_normalization_38_gamma =  readTrainedWeights(batch_normalization_38_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_beta_path =  dir_prefix + std::string("batch_normalization_38_beta.bin"); 
-void* batch_normalization_38_beta =  readTrainedWeights(batch_normalization_38_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_mean_path =  dir_prefix + std::string("batch_normalization_38_mean.bin"); 
-void* batch_normalization_38_mean =  readTrainedWeights(batch_normalization_38_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_variance_path =  dir_prefix + std::string("batch_normalization_38_variance.bin"); 
-void* batch_normalization_38_variance =  readTrainedWeights(batch_normalization_38_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_27_w_path =  dir_prefix + std::string("conv2d_27_w.bin"); 
-void* conv2d_27_w =  readTrainedWeights(conv2d_27_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_39_gamma_path =  dir_prefix + std::string("batch_normalization_39_gamma.bin"); 
-void* batch_normalization_39_gamma =  readTrainedWeights(batch_normalization_39_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_beta_path =  dir_prefix + std::string("batch_normalization_39_beta.bin"); 
-void* batch_normalization_39_beta =  readTrainedWeights(batch_normalization_39_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_mean_path =  dir_prefix + std::string("batch_normalization_39_mean.bin"); 
-void* batch_normalization_39_mean =  readTrainedWeights(batch_normalization_39_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_variance_path =  dir_prefix + std::string("batch_normalization_39_variance.bin"); 
-void* batch_normalization_39_variance =  readTrainedWeights(batch_normalization_39_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_14_w_path =  dir_prefix + std::string("depthwise_conv2d_14_w.bin"); 
-void* depthwise_conv2d_14_w =  readTrainedWeights(depthwise_conv2d_14_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_40_gamma_path =  dir_prefix + std::string("batch_normalization_40_gamma.bin"); 
-void* batch_normalization_40_gamma =  readTrainedWeights(batch_normalization_40_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_beta_path =  dir_prefix + std::string("batch_normalization_40_beta.bin"); 
-void* batch_normalization_40_beta =  readTrainedWeights(batch_normalization_40_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_mean_path =  dir_prefix + std::string("batch_normalization_40_mean.bin"); 
-void* batch_normalization_40_mean =  readTrainedWeights(batch_normalization_40_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_variance_path =  dir_prefix + std::string("batch_normalization_40_variance.bin"); 
-void* batch_normalization_40_variance =  readTrainedWeights(batch_normalization_40_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_28_w_path =  dir_prefix + std::string("conv2d_28_w.bin"); 
-void* conv2d_28_w =  readTrainedWeights(conv2d_28_w_path.c_str(), 0,160,576,1,1); 
-std::string batch_normalization_41_gamma_path =  dir_prefix + std::string("batch_normalization_41_gamma.bin"); 
-void* batch_normalization_41_gamma =  readTrainedWeights(batch_normalization_41_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_beta_path =  dir_prefix + std::string("batch_normalization_41_beta.bin"); 
-void* batch_normalization_41_beta =  readTrainedWeights(batch_normalization_41_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_mean_path =  dir_prefix + std::string("batch_normalization_41_mean.bin"); 
-void* batch_normalization_41_mean =  readTrainedWeights(batch_normalization_41_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_variance_path =  dir_prefix + std::string("batch_normalization_41_variance.bin"); 
-void* batch_normalization_41_variance =  readTrainedWeights(batch_normalization_41_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_29_w_path =  dir_prefix + std::string("conv2d_29_w.bin"); 
-void* conv2d_29_w =  readTrainedWeights(conv2d_29_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_42_gamma_path =  dir_prefix + std::string("batch_normalization_42_gamma.bin"); 
-void* batch_normalization_42_gamma =  readTrainedWeights(batch_normalization_42_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_beta_path =  dir_prefix + std::string("batch_normalization_42_beta.bin"); 
-void* batch_normalization_42_beta =  readTrainedWeights(batch_normalization_42_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_mean_path =  dir_prefix + std::string("batch_normalization_42_mean.bin"); 
-void* batch_normalization_42_mean =  readTrainedWeights(batch_normalization_42_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_variance_path =  dir_prefix + std::string("batch_normalization_42_variance.bin"); 
-void* batch_normalization_42_variance =  readTrainedWeights(batch_normalization_42_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_15_w_path =  dir_prefix + std::string("depthwise_conv2d_15_w.bin"); 
-void* depthwise_conv2d_15_w =  readTrainedWeights(depthwise_conv2d_15_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_43_gamma_path =  dir_prefix + std::string("batch_normalization_43_gamma.bin"); 
-void* batch_normalization_43_gamma =  readTrainedWeights(batch_normalization_43_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_beta_path =  dir_prefix + std::string("batch_normalization_43_beta.bin"); 
-void* batch_normalization_43_beta =  readTrainedWeights(batch_normalization_43_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_mean_path =  dir_prefix + std::string("batch_normalization_43_mean.bin"); 
-void* batch_normalization_43_mean =  readTrainedWeights(batch_normalization_43_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_variance_path =  dir_prefix + std::string("batch_normalization_43_variance.bin"); 
-void* batch_normalization_43_variance =  readTrainedWeights(batch_normalization_43_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_30_w_path =  dir_prefix + std::string("conv2d_30_w.bin"); 
-void* conv2d_30_w =  readTrainedWeights(conv2d_30_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_44_gamma_path =  dir_prefix + std::string("batch_normalization_44_gamma.bin"); 
-void* batch_normalization_44_gamma =  readTrainedWeights(batch_normalization_44_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_beta_path =  dir_prefix + std::string("batch_normalization_44_beta.bin"); 
-void* batch_normalization_44_beta =  readTrainedWeights(batch_normalization_44_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_mean_path =  dir_prefix + std::string("batch_normalization_44_mean.bin"); 
-void* batch_normalization_44_mean =  readTrainedWeights(batch_normalization_44_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_variance_path =  dir_prefix + std::string("batch_normalization_44_variance.bin"); 
-void* batch_normalization_44_variance =  readTrainedWeights(batch_normalization_44_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_31_w_path =  dir_prefix + std::string("conv2d_31_w.bin"); 
-void* conv2d_31_w =  readTrainedWeights(conv2d_31_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_45_gamma_path =  dir_prefix + std::string("batch_normalization_45_gamma.bin"); 
-void* batch_normalization_45_gamma =  readTrainedWeights(batch_normalization_45_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_beta_path =  dir_prefix + std::string("batch_normalization_45_beta.bin"); 
-void* batch_normalization_45_beta =  readTrainedWeights(batch_normalization_45_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_mean_path =  dir_prefix + std::string("batch_normalization_45_mean.bin"); 
-void* batch_normalization_45_mean =  readTrainedWeights(batch_normalization_45_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_variance_path =  dir_prefix + std::string("batch_normalization_45_variance.bin"); 
-void* batch_normalization_45_variance =  readTrainedWeights(batch_normalization_45_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_16_w_path =  dir_prefix + std::string("depthwise_conv2d_16_w.bin"); 
-void* depthwise_conv2d_16_w =  readTrainedWeights(depthwise_conv2d_16_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_46_gamma_path =  dir_prefix + std::string("batch_normalization_46_gamma.bin"); 
-void* batch_normalization_46_gamma =  readTrainedWeights(batch_normalization_46_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_beta_path =  dir_prefix + std::string("batch_normalization_46_beta.bin"); 
-void* batch_normalization_46_beta =  readTrainedWeights(batch_normalization_46_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_mean_path =  dir_prefix + std::string("batch_normalization_46_mean.bin"); 
-void* batch_normalization_46_mean =  readTrainedWeights(batch_normalization_46_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_variance_path =  dir_prefix + std::string("batch_normalization_46_variance.bin"); 
-void* batch_normalization_46_variance =  readTrainedWeights(batch_normalization_46_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_32_w_path =  dir_prefix + std::string("conv2d_32_w.bin"); 
-void* conv2d_32_w =  readTrainedWeights(conv2d_32_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_47_gamma_path =  dir_prefix + std::string("batch_normalization_47_gamma.bin"); 
-void* batch_normalization_47_gamma =  readTrainedWeights(batch_normalization_47_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_beta_path =  dir_prefix + std::string("batch_normalization_47_beta.bin"); 
-void* batch_normalization_47_beta =  readTrainedWeights(batch_normalization_47_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_mean_path =  dir_prefix + std::string("batch_normalization_47_mean.bin"); 
-void* batch_normalization_47_mean =  readTrainedWeights(batch_normalization_47_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_variance_path =  dir_prefix + std::string("batch_normalization_47_variance.bin"); 
-void* batch_normalization_47_variance =  readTrainedWeights(batch_normalization_47_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_33_w_path =  dir_prefix + std::string("conv2d_33_w.bin"); 
-void* conv2d_33_w =  readTrainedWeights(conv2d_33_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_48_gamma_path =  dir_prefix + std::string("batch_normalization_48_gamma.bin"); 
-void* batch_normalization_48_gamma =  readTrainedWeights(batch_normalization_48_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_beta_path =  dir_prefix + std::string("batch_normalization_48_beta.bin"); 
-void* batch_normalization_48_beta =  readTrainedWeights(batch_normalization_48_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_mean_path =  dir_prefix + std::string("batch_normalization_48_mean.bin"); 
-void* batch_normalization_48_mean =  readTrainedWeights(batch_normalization_48_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_variance_path =  dir_prefix + std::string("batch_normalization_48_variance.bin"); 
-void* batch_normalization_48_variance =  readTrainedWeights(batch_normalization_48_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_17_w_path =  dir_prefix + std::string("depthwise_conv2d_17_w.bin"); 
-void* depthwise_conv2d_17_w =  readTrainedWeights(depthwise_conv2d_17_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_49_gamma_path =  dir_prefix + std::string("batch_normalization_49_gamma.bin"); 
-void* batch_normalization_49_gamma =  readTrainedWeights(batch_normalization_49_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_beta_path =  dir_prefix + std::string("batch_normalization_49_beta.bin"); 
-void* batch_normalization_49_beta =  readTrainedWeights(batch_normalization_49_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_mean_path =  dir_prefix + std::string("batch_normalization_49_mean.bin"); 
-void* batch_normalization_49_mean =  readTrainedWeights(batch_normalization_49_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_variance_path =  dir_prefix + std::string("batch_normalization_49_variance.bin"); 
-void* batch_normalization_49_variance =  readTrainedWeights(batch_normalization_49_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_34_w_path =  dir_prefix + std::string("conv2d_34_w.bin"); 
-void* conv2d_34_w =  readTrainedWeights(conv2d_34_w_path.c_str(), 0,320,960,1,1); 
-std::string batch_normalization_50_gamma_path =  dir_prefix + std::string("batch_normalization_50_gamma.bin"); 
-void* batch_normalization_50_gamma =  readTrainedWeights(batch_normalization_50_gamma_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_beta_path =  dir_prefix + std::string("batch_normalization_50_beta.bin"); 
-void* batch_normalization_50_beta =  readTrainedWeights(batch_normalization_50_beta_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_mean_path =  dir_prefix + std::string("batch_normalization_50_mean.bin"); 
-void* batch_normalization_50_mean =  readTrainedWeights(batch_normalization_50_mean_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_variance_path =  dir_prefix + std::string("batch_normalization_50_variance.bin"); 
-void* batch_normalization_50_variance =  readTrainedWeights(batch_normalization_50_variance_path.c_str(), 0,1,320,1,1); 
-std::string conv2d_35_w_path =  dir_prefix + std::string("conv2d_35_w.bin"); 
-void* conv2d_35_w =  readTrainedWeights(conv2d_35_w_path.c_str(), 0,1280,320,1,1); 
-std::string batch_normalization_51_gamma_path =  dir_prefix + std::string("batch_normalization_51_gamma.bin"); 
-void* batch_normalization_51_gamma =  readTrainedWeights(batch_normalization_51_gamma_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_beta_path =  dir_prefix + std::string("batch_normalization_51_beta.bin"); 
-void* batch_normalization_51_beta =  readTrainedWeights(batch_normalization_51_beta_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_mean_path =  dir_prefix + std::string("batch_normalization_51_mean.bin"); 
-void* batch_normalization_51_mean =  readTrainedWeights(batch_normalization_51_mean_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_variance_path =  dir_prefix + std::string("batch_normalization_51_variance.bin"); 
-void* batch_normalization_51_variance =  readTrainedWeights(batch_normalization_51_variance_path.c_str(), 0,1,1280,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,5120,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
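-// Load the 10,000-image test set (3x32x32 inputs) and its labels. 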
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
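-// Initialize the HPVM runtime and allocate the argument struct for the root dataflow node. 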
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
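-// Bind each loaded tensor to its RootIn field; the *_bytes size fields are unused here and left at 0. 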
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->depthwise_conv2d_1_w = depthwise_conv2d_1_w; 
-args->depthwise_conv2d_1_w_bytes = 0; 
-args->batch_normalization_1_gamma = batch_normalization_1_gamma; 
-args->batch_normalization_1_gamma_bytes = 0; 
-args->batch_normalization_1_beta = batch_normalization_1_beta; 
-args->batch_normalization_1_beta_bytes = 0; 
-args->batch_normalization_1_mean = batch_normalization_1_mean; 
-args->batch_normalization_1_mean_bytes = 0; 
-args->batch_normalization_1_variance = batch_normalization_1_variance; 
-args->batch_normalization_1_variance_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->batch_normalization_2_gamma = batch_normalization_2_gamma; 
-args->batch_normalization_2_gamma_bytes = 0; 
-args->batch_normalization_2_beta = batch_normalization_2_beta; 
-args->batch_normalization_2_beta_bytes = 0; 
-args->batch_normalization_2_mean = batch_normalization_2_mean; 
-args->batch_normalization_2_mean_bytes = 0; 
-args->batch_normalization_2_variance = batch_normalization_2_variance; 
-args->batch_normalization_2_variance_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->batch_normalization_3_gamma = batch_normalization_3_gamma; 
-args->batch_normalization_3_gamma_bytes = 0; 
-args->batch_normalization_3_beta = batch_normalization_3_beta; 
-args->batch_normalization_3_beta_bytes = 0; 
-args->batch_normalization_3_mean = batch_normalization_3_mean; 
-args->batch_normalization_3_mean_bytes = 0; 
-args->batch_normalization_3_variance = batch_normalization_3_variance; 
-args->batch_normalization_3_variance_bytes = 0; 
-args->depthwise_conv2d_2_w = depthwise_conv2d_2_w; 
-args->depthwise_conv2d_2_w_bytes = 0; 
-args->batch_normalization_4_gamma = batch_normalization_4_gamma; 
-args->batch_normalization_4_gamma_bytes = 0; 
-args->batch_normalization_4_beta = batch_normalization_4_beta; 
-args->batch_normalization_4_beta_bytes = 0; 
-args->batch_normalization_4_mean = batch_normalization_4_mean; 
-args->batch_normalization_4_mean_bytes = 0; 
-args->batch_normalization_4_variance = batch_normalization_4_variance; 
-args->batch_normalization_4_variance_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->batch_normalization_5_gamma = batch_normalization_5_gamma; 
-args->batch_normalization_5_gamma_bytes = 0; 
-args->batch_normalization_5_beta = batch_normalization_5_beta; 
-args->batch_normalization_5_beta_bytes = 0; 
-args->batch_normalization_5_mean = batch_normalization_5_mean; 
-args->batch_normalization_5_mean_bytes = 0; 
-args->batch_normalization_5_variance = batch_normalization_5_variance; 
-args->batch_normalization_5_variance_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->batch_normalization_6_gamma = batch_normalization_6_gamma; 
-args->batch_normalization_6_gamma_bytes = 0; 
-args->batch_normalization_6_beta = batch_normalization_6_beta; 
-args->batch_normalization_6_beta_bytes = 0; 
-args->batch_normalization_6_mean = batch_normalization_6_mean; 
-args->batch_normalization_6_mean_bytes = 0; 
-args->batch_normalization_6_variance = batch_normalization_6_variance; 
-args->batch_normalization_6_variance_bytes = 0; 
-args->depthwise_conv2d_3_w = depthwise_conv2d_3_w; 
-args->depthwise_conv2d_3_w_bytes = 0; 
-args->batch_normalization_7_gamma = batch_normalization_7_gamma; 
-args->batch_normalization_7_gamma_bytes = 0; 
-args->batch_normalization_7_beta = batch_normalization_7_beta; 
-args->batch_normalization_7_beta_bytes = 0; 
-args->batch_normalization_7_mean = batch_normalization_7_mean; 
-args->batch_normalization_7_mean_bytes = 0; 
-args->batch_normalization_7_variance = batch_normalization_7_variance; 
-args->batch_normalization_7_variance_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->batch_normalization_8_gamma = batch_normalization_8_gamma; 
-args->batch_normalization_8_gamma_bytes = 0; 
-args->batch_normalization_8_beta = batch_normalization_8_beta; 
-args->batch_normalization_8_beta_bytes = 0; 
-args->batch_normalization_8_mean = batch_normalization_8_mean; 
-args->batch_normalization_8_mean_bytes = 0; 
-args->batch_normalization_8_variance = batch_normalization_8_variance; 
-args->batch_normalization_8_variance_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->batch_normalization_9_gamma = batch_normalization_9_gamma; 
-args->batch_normalization_9_gamma_bytes = 0; 
-args->batch_normalization_9_beta = batch_normalization_9_beta; 
-args->batch_normalization_9_beta_bytes = 0; 
-args->batch_normalization_9_mean = batch_normalization_9_mean; 
-args->batch_normalization_9_mean_bytes = 0; 
-args->batch_normalization_9_variance = batch_normalization_9_variance; 
-args->batch_normalization_9_variance_bytes = 0; 
-args->depthwise_conv2d_4_w = depthwise_conv2d_4_w; 
-args->depthwise_conv2d_4_w_bytes = 0; 
-args->batch_normalization_10_gamma = batch_normalization_10_gamma; 
-args->batch_normalization_10_gamma_bytes = 0; 
-args->batch_normalization_10_beta = batch_normalization_10_beta; 
-args->batch_normalization_10_beta_bytes = 0; 
-args->batch_normalization_10_mean = batch_normalization_10_mean; 
-args->batch_normalization_10_mean_bytes = 0; 
-args->batch_normalization_10_variance = batch_normalization_10_variance; 
-args->batch_normalization_10_variance_bytes = 0; 
-args->conv2d_8_w = conv2d_8_w; 
-args->conv2d_8_w_bytes = 0; 
-args->batch_normalization_11_gamma = batch_normalization_11_gamma; 
-args->batch_normalization_11_gamma_bytes = 0; 
-args->batch_normalization_11_beta = batch_normalization_11_beta; 
-args->batch_normalization_11_beta_bytes = 0; 
-args->batch_normalization_11_mean = batch_normalization_11_mean; 
-args->batch_normalization_11_mean_bytes = 0; 
-args->batch_normalization_11_variance = batch_normalization_11_variance; 
-args->batch_normalization_11_variance_bytes = 0; 
-args->conv2d_9_w = conv2d_9_w; 
-args->conv2d_9_w_bytes = 0; 
-args->batch_normalization_12_gamma = batch_normalization_12_gamma; 
-args->batch_normalization_12_gamma_bytes = 0; 
-args->batch_normalization_12_beta = batch_normalization_12_beta; 
-args->batch_normalization_12_beta_bytes = 0; 
-args->batch_normalization_12_mean = batch_normalization_12_mean; 
-args->batch_normalization_12_mean_bytes = 0; 
-args->batch_normalization_12_variance = batch_normalization_12_variance; 
-args->batch_normalization_12_variance_bytes = 0; 
-args->depthwise_conv2d_5_w = depthwise_conv2d_5_w; 
-args->depthwise_conv2d_5_w_bytes = 0; 
-args->batch_normalization_13_gamma = batch_normalization_13_gamma; 
-args->batch_normalization_13_gamma_bytes = 0; 
-args->batch_normalization_13_beta = batch_normalization_13_beta; 
-args->batch_normalization_13_beta_bytes = 0; 
-args->batch_normalization_13_mean = batch_normalization_13_mean; 
-args->batch_normalization_13_mean_bytes = 0; 
-args->batch_normalization_13_variance = batch_normalization_13_variance; 
-args->batch_normalization_13_variance_bytes = 0; 
-args->conv2d_10_w = conv2d_10_w; 
-args->conv2d_10_w_bytes = 0; 
-args->batch_normalization_14_gamma = batch_normalization_14_gamma; 
-args->batch_normalization_14_gamma_bytes = 0; 
-args->batch_normalization_14_beta = batch_normalization_14_beta; 
-args->batch_normalization_14_beta_bytes = 0; 
-args->batch_normalization_14_mean = batch_normalization_14_mean; 
-args->batch_normalization_14_mean_bytes = 0; 
-args->batch_normalization_14_variance = batch_normalization_14_variance; 
-args->batch_normalization_14_variance_bytes = 0; 
-args->conv2d_11_w = conv2d_11_w; 
-args->conv2d_11_w_bytes = 0; 
-args->batch_normalization_15_gamma = batch_normalization_15_gamma; 
-args->batch_normalization_15_gamma_bytes = 0; 
-args->batch_normalization_15_beta = batch_normalization_15_beta; 
-args->batch_normalization_15_beta_bytes = 0; 
-args->batch_normalization_15_mean = batch_normalization_15_mean; 
-args->batch_normalization_15_mean_bytes = 0; 
-args->batch_normalization_15_variance = batch_normalization_15_variance; 
-args->batch_normalization_15_variance_bytes = 0; 
-args->depthwise_conv2d_6_w = depthwise_conv2d_6_w; 
-args->depthwise_conv2d_6_w_bytes = 0; 
-args->batch_normalization_16_gamma = batch_normalization_16_gamma; 
-args->batch_normalization_16_gamma_bytes = 0; 
-args->batch_normalization_16_beta = batch_normalization_16_beta; 
-args->batch_normalization_16_beta_bytes = 0; 
-args->batch_normalization_16_mean = batch_normalization_16_mean; 
-args->batch_normalization_16_mean_bytes = 0; 
-args->batch_normalization_16_variance = batch_normalization_16_variance; 
-args->batch_normalization_16_variance_bytes = 0; 
-args->conv2d_12_w = conv2d_12_w; 
-args->conv2d_12_w_bytes = 0; 
-args->batch_normalization_17_gamma = batch_normalization_17_gamma; 
-args->batch_normalization_17_gamma_bytes = 0; 
-args->batch_normalization_17_beta = batch_normalization_17_beta; 
-args->batch_normalization_17_beta_bytes = 0; 
-args->batch_normalization_17_mean = batch_normalization_17_mean; 
-args->batch_normalization_17_mean_bytes = 0; 
-args->batch_normalization_17_variance = batch_normalization_17_variance; 
-args->batch_normalization_17_variance_bytes = 0; 
-args->conv2d_13_w = conv2d_13_w; 
-args->conv2d_13_w_bytes = 0; 
-args->batch_normalization_18_gamma = batch_normalization_18_gamma; 
-args->batch_normalization_18_gamma_bytes = 0; 
-args->batch_normalization_18_beta = batch_normalization_18_beta; 
-args->batch_normalization_18_beta_bytes = 0; 
-args->batch_normalization_18_mean = batch_normalization_18_mean; 
-args->batch_normalization_18_mean_bytes = 0; 
-args->batch_normalization_18_variance = batch_normalization_18_variance; 
-args->batch_normalization_18_variance_bytes = 0; 
-args->depthwise_conv2d_7_w = depthwise_conv2d_7_w; 
-args->depthwise_conv2d_7_w_bytes = 0; 
-args->batch_normalization_19_gamma = batch_normalization_19_gamma; 
-args->batch_normalization_19_gamma_bytes = 0; 
-args->batch_normalization_19_beta = batch_normalization_19_beta; 
-args->batch_normalization_19_beta_bytes = 0; 
-args->batch_normalization_19_mean = batch_normalization_19_mean; 
-args->batch_normalization_19_mean_bytes = 0; 
-args->batch_normalization_19_variance = batch_normalization_19_variance; 
-args->batch_normalization_19_variance_bytes = 0; 
-args->conv2d_14_w = conv2d_14_w; 
-args->conv2d_14_w_bytes = 0; 
-args->batch_normalization_20_gamma = batch_normalization_20_gamma; 
-args->batch_normalization_20_gamma_bytes = 0; 
-args->batch_normalization_20_beta = batch_normalization_20_beta; 
-args->batch_normalization_20_beta_bytes = 0; 
-args->batch_normalization_20_mean = batch_normalization_20_mean; 
-args->batch_normalization_20_mean_bytes = 0; 
-args->batch_normalization_20_variance = batch_normalization_20_variance; 
-args->batch_normalization_20_variance_bytes = 0; 
-args->conv2d_15_w = conv2d_15_w; 
-args->conv2d_15_w_bytes = 0; 
-args->batch_normalization_21_gamma = batch_normalization_21_gamma; 
-args->batch_normalization_21_gamma_bytes = 0; 
-args->batch_normalization_21_beta = batch_normalization_21_beta; 
-args->batch_normalization_21_beta_bytes = 0; 
-args->batch_normalization_21_mean = batch_normalization_21_mean; 
-args->batch_normalization_21_mean_bytes = 0; 
-args->batch_normalization_21_variance = batch_normalization_21_variance; 
-args->batch_normalization_21_variance_bytes = 0; 
-args->depthwise_conv2d_8_w = depthwise_conv2d_8_w; 
-args->depthwise_conv2d_8_w_bytes = 0; 
-args->batch_normalization_22_gamma = batch_normalization_22_gamma; 
-args->batch_normalization_22_gamma_bytes = 0; 
-args->batch_normalization_22_beta = batch_normalization_22_beta; 
-args->batch_normalization_22_beta_bytes = 0; 
-args->batch_normalization_22_mean = batch_normalization_22_mean; 
-args->batch_normalization_22_mean_bytes = 0; 
-args->batch_normalization_22_variance = batch_normalization_22_variance; 
-args->batch_normalization_22_variance_bytes = 0; 
-args->conv2d_16_w = conv2d_16_w; 
-args->conv2d_16_w_bytes = 0; 
-args->batch_normalization_23_gamma = batch_normalization_23_gamma; 
-args->batch_normalization_23_gamma_bytes = 0; 
-args->batch_normalization_23_beta = batch_normalization_23_beta; 
-args->batch_normalization_23_beta_bytes = 0; 
-args->batch_normalization_23_mean = batch_normalization_23_mean; 
-args->batch_normalization_23_mean_bytes = 0; 
-args->batch_normalization_23_variance = batch_normalization_23_variance; 
-args->batch_normalization_23_variance_bytes = 0; 
-args->conv2d_17_w = conv2d_17_w; 
-args->conv2d_17_w_bytes = 0; 
-args->batch_normalization_24_gamma = batch_normalization_24_gamma; 
-args->batch_normalization_24_gamma_bytes = 0; 
-args->batch_normalization_24_beta = batch_normalization_24_beta; 
-args->batch_normalization_24_beta_bytes = 0; 
-args->batch_normalization_24_mean = batch_normalization_24_mean; 
-args->batch_normalization_24_mean_bytes = 0; 
-args->batch_normalization_24_variance = batch_normalization_24_variance; 
-args->batch_normalization_24_variance_bytes = 0; 
-args->depthwise_conv2d_9_w = depthwise_conv2d_9_w; 
-args->depthwise_conv2d_9_w_bytes = 0; 
-args->batch_normalization_25_gamma = batch_normalization_25_gamma; 
-args->batch_normalization_25_gamma_bytes = 0; 
-args->batch_normalization_25_beta = batch_normalization_25_beta; 
-args->batch_normalization_25_beta_bytes = 0; 
-args->batch_normalization_25_mean = batch_normalization_25_mean; 
-args->batch_normalization_25_mean_bytes = 0; 
-args->batch_normalization_25_variance = batch_normalization_25_variance; 
-args->batch_normalization_25_variance_bytes = 0; 
-args->conv2d_18_w = conv2d_18_w; 
-args->conv2d_18_w_bytes = 0; 
-args->batch_normalization_26_gamma = batch_normalization_26_gamma; 
-args->batch_normalization_26_gamma_bytes = 0; 
-args->batch_normalization_26_beta = batch_normalization_26_beta; 
-args->batch_normalization_26_beta_bytes = 0; 
-args->batch_normalization_26_mean = batch_normalization_26_mean; 
-args->batch_normalization_26_mean_bytes = 0; 
-args->batch_normalization_26_variance = batch_normalization_26_variance; 
-args->batch_normalization_26_variance_bytes = 0; 
-args->conv2d_19_w = conv2d_19_w; 
-args->conv2d_19_w_bytes = 0; 
-args->batch_normalization_27_gamma = batch_normalization_27_gamma; 
-args->batch_normalization_27_gamma_bytes = 0; 
-args->batch_normalization_27_beta = batch_normalization_27_beta; 
-args->batch_normalization_27_beta_bytes = 0; 
-args->batch_normalization_27_mean = batch_normalization_27_mean; 
-args->batch_normalization_27_mean_bytes = 0; 
-args->batch_normalization_27_variance = batch_normalization_27_variance; 
-args->batch_normalization_27_variance_bytes = 0; 
-args->depthwise_conv2d_10_w = depthwise_conv2d_10_w; 
-args->depthwise_conv2d_10_w_bytes = 0; 
-args->batch_normalization_28_gamma = batch_normalization_28_gamma; 
-args->batch_normalization_28_gamma_bytes = 0; 
-args->batch_normalization_28_beta = batch_normalization_28_beta; 
-args->batch_normalization_28_beta_bytes = 0; 
-args->batch_normalization_28_mean = batch_normalization_28_mean; 
-args->batch_normalization_28_mean_bytes = 0; 
-args->batch_normalization_28_variance = batch_normalization_28_variance; 
-args->batch_normalization_28_variance_bytes = 0; 
-args->conv2d_20_w = conv2d_20_w; 
-args->conv2d_20_w_bytes = 0; 
-args->batch_normalization_29_gamma = batch_normalization_29_gamma; 
-args->batch_normalization_29_gamma_bytes = 0; 
-args->batch_normalization_29_beta = batch_normalization_29_beta; 
-args->batch_normalization_29_beta_bytes = 0; 
-args->batch_normalization_29_mean = batch_normalization_29_mean; 
-args->batch_normalization_29_mean_bytes = 0; 
-args->batch_normalization_29_variance = batch_normalization_29_variance; 
-args->batch_normalization_29_variance_bytes = 0; 
-args->conv2d_21_w = conv2d_21_w; 
-args->conv2d_21_w_bytes = 0; 
-args->batch_normalization_30_gamma = batch_normalization_30_gamma; 
-args->batch_normalization_30_gamma_bytes = 0; 
-args->batch_normalization_30_beta = batch_normalization_30_beta; 
-args->batch_normalization_30_beta_bytes = 0; 
-args->batch_normalization_30_mean = batch_normalization_30_mean; 
-args->batch_normalization_30_mean_bytes = 0; 
-args->batch_normalization_30_variance = batch_normalization_30_variance; 
-args->batch_normalization_30_variance_bytes = 0; 
-args->depthwise_conv2d_11_w = depthwise_conv2d_11_w; 
-args->depthwise_conv2d_11_w_bytes = 0; 
-args->batch_normalization_31_gamma = batch_normalization_31_gamma; 
-args->batch_normalization_31_gamma_bytes = 0; 
-args->batch_normalization_31_beta = batch_normalization_31_beta; 
-args->batch_normalization_31_beta_bytes = 0; 
-args->batch_normalization_31_mean = batch_normalization_31_mean; 
-args->batch_normalization_31_mean_bytes = 0; 
-args->batch_normalization_31_variance = batch_normalization_31_variance; 
-args->batch_normalization_31_variance_bytes = 0; 
-args->conv2d_22_w = conv2d_22_w; 
-args->conv2d_22_w_bytes = 0; 
-args->batch_normalization_32_gamma = batch_normalization_32_gamma; 
-args->batch_normalization_32_gamma_bytes = 0; 
-args->batch_normalization_32_beta = batch_normalization_32_beta; 
-args->batch_normalization_32_beta_bytes = 0; 
-args->batch_normalization_32_mean = batch_normalization_32_mean; 
-args->batch_normalization_32_mean_bytes = 0; 
-args->batch_normalization_32_variance = batch_normalization_32_variance; 
-args->batch_normalization_32_variance_bytes = 0; 
-args->conv2d_23_w = conv2d_23_w; 
-args->conv2d_23_w_bytes = 0; 
-args->batch_normalization_33_gamma = batch_normalization_33_gamma; 
-args->batch_normalization_33_gamma_bytes = 0; 
-args->batch_normalization_33_beta = batch_normalization_33_beta; 
-args->batch_normalization_33_beta_bytes = 0; 
-args->batch_normalization_33_mean = batch_normalization_33_mean; 
-args->batch_normalization_33_mean_bytes = 0; 
-args->batch_normalization_33_variance = batch_normalization_33_variance; 
-args->batch_normalization_33_variance_bytes = 0; 
-args->depthwise_conv2d_12_w = depthwise_conv2d_12_w; 
-args->depthwise_conv2d_12_w_bytes = 0; 
-args->batch_normalization_34_gamma = batch_normalization_34_gamma; 
-args->batch_normalization_34_gamma_bytes = 0; 
-args->batch_normalization_34_beta = batch_normalization_34_beta; 
-args->batch_normalization_34_beta_bytes = 0; 
-args->batch_normalization_34_mean = batch_normalization_34_mean; 
-args->batch_normalization_34_mean_bytes = 0; 
-args->batch_normalization_34_variance = batch_normalization_34_variance; 
-args->batch_normalization_34_variance_bytes = 0; 
-args->conv2d_24_w = conv2d_24_w; 
-args->conv2d_24_w_bytes = 0; 
-args->batch_normalization_35_gamma = batch_normalization_35_gamma; 
-args->batch_normalization_35_gamma_bytes = 0; 
-args->batch_normalization_35_beta = batch_normalization_35_beta; 
-args->batch_normalization_35_beta_bytes = 0; 
-args->batch_normalization_35_mean = batch_normalization_35_mean; 
-args->batch_normalization_35_mean_bytes = 0; 
-args->batch_normalization_35_variance = batch_normalization_35_variance; 
-args->batch_normalization_35_variance_bytes = 0; 
-args->conv2d_25_w = conv2d_25_w; 
-args->conv2d_25_w_bytes = 0; 
-args->batch_normalization_36_gamma = batch_normalization_36_gamma; 
-args->batch_normalization_36_gamma_bytes = 0; 
-args->batch_normalization_36_beta = batch_normalization_36_beta; 
-args->batch_normalization_36_beta_bytes = 0; 
-args->batch_normalization_36_mean = batch_normalization_36_mean; 
-args->batch_normalization_36_mean_bytes = 0; 
-args->batch_normalization_36_variance = batch_normalization_36_variance; 
-args->batch_normalization_36_variance_bytes = 0; 
-args->depthwise_conv2d_13_w = depthwise_conv2d_13_w; 
-args->depthwise_conv2d_13_w_bytes = 0; 
-args->batch_normalization_37_gamma = batch_normalization_37_gamma; 
-args->batch_normalization_37_gamma_bytes = 0; 
-args->batch_normalization_37_beta = batch_normalization_37_beta; 
-args->batch_normalization_37_beta_bytes = 0; 
-args->batch_normalization_37_mean = batch_normalization_37_mean; 
-args->batch_normalization_37_mean_bytes = 0; 
-args->batch_normalization_37_variance = batch_normalization_37_variance; 
-args->batch_normalization_37_variance_bytes = 0; 
-args->conv2d_26_w = conv2d_26_w; 
-args->conv2d_26_w_bytes = 0; 
-args->batch_normalization_38_gamma = batch_normalization_38_gamma; 
-args->batch_normalization_38_gamma_bytes = 0; 
-args->batch_normalization_38_beta = batch_normalization_38_beta; 
-args->batch_normalization_38_beta_bytes = 0; 
-args->batch_normalization_38_mean = batch_normalization_38_mean; 
-args->batch_normalization_38_mean_bytes = 0; 
-args->batch_normalization_38_variance = batch_normalization_38_variance; 
-args->batch_normalization_38_variance_bytes = 0; 
-args->conv2d_27_w = conv2d_27_w; 
-args->conv2d_27_w_bytes = 0; 
-args->batch_normalization_39_gamma = batch_normalization_39_gamma; 
-args->batch_normalization_39_gamma_bytes = 0; 
-args->batch_normalization_39_beta = batch_normalization_39_beta; 
-args->batch_normalization_39_beta_bytes = 0; 
-args->batch_normalization_39_mean = batch_normalization_39_mean; 
-args->batch_normalization_39_mean_bytes = 0; 
-args->batch_normalization_39_variance = batch_normalization_39_variance; 
-args->batch_normalization_39_variance_bytes = 0; 
-args->depthwise_conv2d_14_w = depthwise_conv2d_14_w; 
-args->depthwise_conv2d_14_w_bytes = 0; 
-args->batch_normalization_40_gamma = batch_normalization_40_gamma; 
-args->batch_normalization_40_gamma_bytes = 0; 
-args->batch_normalization_40_beta = batch_normalization_40_beta; 
-args->batch_normalization_40_beta_bytes = 0; 
-args->batch_normalization_40_mean = batch_normalization_40_mean; 
-args->batch_normalization_40_mean_bytes = 0; 
-args->batch_normalization_40_variance = batch_normalization_40_variance; 
-args->batch_normalization_40_variance_bytes = 0; 
-args->conv2d_28_w = conv2d_28_w; 
-args->conv2d_28_w_bytes = 0; 
-args->batch_normalization_41_gamma = batch_normalization_41_gamma; 
-args->batch_normalization_41_gamma_bytes = 0; 
-args->batch_normalization_41_beta = batch_normalization_41_beta; 
-args->batch_normalization_41_beta_bytes = 0; 
-args->batch_normalization_41_mean = batch_normalization_41_mean; 
-args->batch_normalization_41_mean_bytes = 0; 
-args->batch_normalization_41_variance = batch_normalization_41_variance; 
-args->batch_normalization_41_variance_bytes = 0; 
-args->conv2d_29_w = conv2d_29_w; 
-args->conv2d_29_w_bytes = 0; 
-args->batch_normalization_42_gamma = batch_normalization_42_gamma; 
-args->batch_normalization_42_gamma_bytes = 0; 
-args->batch_normalization_42_beta = batch_normalization_42_beta; 
-args->batch_normalization_42_beta_bytes = 0; 
-args->batch_normalization_42_mean = batch_normalization_42_mean; 
-args->batch_normalization_42_mean_bytes = 0; 
-args->batch_normalization_42_variance = batch_normalization_42_variance; 
-args->batch_normalization_42_variance_bytes = 0; 
-args->depthwise_conv2d_15_w = depthwise_conv2d_15_w; 
-args->depthwise_conv2d_15_w_bytes = 0; 
-args->batch_normalization_43_gamma = batch_normalization_43_gamma; 
-args->batch_normalization_43_gamma_bytes = 0; 
-args->batch_normalization_43_beta = batch_normalization_43_beta; 
-args->batch_normalization_43_beta_bytes = 0; 
-args->batch_normalization_43_mean = batch_normalization_43_mean; 
-args->batch_normalization_43_mean_bytes = 0; 
-args->batch_normalization_43_variance = batch_normalization_43_variance; 
-args->batch_normalization_43_variance_bytes = 0; 
-args->conv2d_30_w = conv2d_30_w; 
-args->conv2d_30_w_bytes = 0; 
-args->batch_normalization_44_gamma = batch_normalization_44_gamma; 
-args->batch_normalization_44_gamma_bytes = 0; 
-args->batch_normalization_44_beta = batch_normalization_44_beta; 
-args->batch_normalization_44_beta_bytes = 0; 
-args->batch_normalization_44_mean = batch_normalization_44_mean; 
-args->batch_normalization_44_mean_bytes = 0; 
-args->batch_normalization_44_variance = batch_normalization_44_variance; 
-args->batch_normalization_44_variance_bytes = 0; 
-args->conv2d_31_w = conv2d_31_w; 
-args->conv2d_31_w_bytes = 0; 
-args->batch_normalization_45_gamma = batch_normalization_45_gamma; 
-args->batch_normalization_45_gamma_bytes = 0; 
-args->batch_normalization_45_beta = batch_normalization_45_beta; 
-args->batch_normalization_45_beta_bytes = 0; 
-args->batch_normalization_45_mean = batch_normalization_45_mean; 
-args->batch_normalization_45_mean_bytes = 0; 
-args->batch_normalization_45_variance = batch_normalization_45_variance; 
-args->batch_normalization_45_variance_bytes = 0; 
-args->depthwise_conv2d_16_w = depthwise_conv2d_16_w; 
-args->depthwise_conv2d_16_w_bytes = 0; 
-args->batch_normalization_46_gamma = batch_normalization_46_gamma; 
-args->batch_normalization_46_gamma_bytes = 0; 
-args->batch_normalization_46_beta = batch_normalization_46_beta; 
-args->batch_normalization_46_beta_bytes = 0; 
-args->batch_normalization_46_mean = batch_normalization_46_mean; 
-args->batch_normalization_46_mean_bytes = 0; 
-args->batch_normalization_46_variance = batch_normalization_46_variance; 
-args->batch_normalization_46_variance_bytes = 0; 
-args->conv2d_32_w = conv2d_32_w; 
-args->conv2d_32_w_bytes = 0; 
-args->batch_normalization_47_gamma = batch_normalization_47_gamma; 
-args->batch_normalization_47_gamma_bytes = 0; 
-args->batch_normalization_47_beta = batch_normalization_47_beta; 
-args->batch_normalization_47_beta_bytes = 0; 
-args->batch_normalization_47_mean = batch_normalization_47_mean; 
-args->batch_normalization_47_mean_bytes = 0; 
-args->batch_normalization_47_variance = batch_normalization_47_variance; 
-args->batch_normalization_47_variance_bytes = 0; 
-args->conv2d_33_w = conv2d_33_w; 
-args->conv2d_33_w_bytes = 0; 
-args->batch_normalization_48_gamma = batch_normalization_48_gamma; 
-args->batch_normalization_48_gamma_bytes = 0; 
-args->batch_normalization_48_beta = batch_normalization_48_beta; 
-args->batch_normalization_48_beta_bytes = 0; 
-args->batch_normalization_48_mean = batch_normalization_48_mean; 
-args->batch_normalization_48_mean_bytes = 0; 
-args->batch_normalization_48_variance = batch_normalization_48_variance; 
-args->batch_normalization_48_variance_bytes = 0; 
-args->depthwise_conv2d_17_w = depthwise_conv2d_17_w; 
-args->depthwise_conv2d_17_w_bytes = 0; 
-args->batch_normalization_49_gamma = batch_normalization_49_gamma; 
-args->batch_normalization_49_gamma_bytes = 0; 
-args->batch_normalization_49_beta = batch_normalization_49_beta; 
-args->batch_normalization_49_beta_bytes = 0; 
-args->batch_normalization_49_mean = batch_normalization_49_mean; 
-args->batch_normalization_49_mean_bytes = 0; 
-args->batch_normalization_49_variance = batch_normalization_49_variance; 
-args->batch_normalization_49_variance_bytes = 0; 
-args->conv2d_34_w = conv2d_34_w; 
-args->conv2d_34_w_bytes = 0; 
-args->batch_normalization_50_gamma = batch_normalization_50_gamma; 
-args->batch_normalization_50_gamma_bytes = 0; 
-args->batch_normalization_50_beta = batch_normalization_50_beta; 
-args->batch_normalization_50_beta_bytes = 0; 
-args->batch_normalization_50_mean = batch_normalization_50_mean; 
-args->batch_normalization_50_mean_bytes = 0; 
-args->batch_normalization_50_variance = batch_normalization_50_variance; 
-args->batch_normalization_50_variance_bytes = 0; 
-args->conv2d_35_w = conv2d_35_w; 
-args->conv2d_35_w_bytes = 0; 
-args->batch_normalization_51_gamma = batch_normalization_51_gamma; 
-args->batch_normalization_51_gamma_bytes = 0; 
-args->batch_normalization_51_beta = batch_normalization_51_beta; 
-args->batch_normalization_51_beta_bytes = 0; 
-args->batch_normalization_51_mean = batch_normalization_51_mean; 
-args->batch_normalization_51_mean_bytes = 0; 
-args->batch_normalization_51_variance = batch_normalization_51_variance; 
-args->batch_normalization_51_variance_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
- computeAccuracy2(labels, 10000, result); 
-return 0; 
-
-} 
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_beta.bin
deleted file mode 100644
index ac0a7aa06c6a9cac74c19ed43fa4fc4114e978ae..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_gamma.bin
deleted file mode 100644
index f4fe5c8b9aa77833eae869f47a7de1bc30445d7b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_mean.bin
deleted file mode 100644
index 887ec6e2f7b5547aac9f08ebb26bb46424fef360..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_variance.bin
deleted file mode 100644
index 623d1458dc4978b1196b6e65a1be6e1f9f1de23a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_10_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_beta.bin
deleted file mode 100644
index 7657f258c2856a685d057f6e3792438d79fb1a60..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_gamma.bin
deleted file mode 100644
index 0d68deb069e67e85a282b3ef204ba3847e57b141..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_mean.bin
deleted file mode 100644
index b0ea894f9c04a5106c2c7e592f791e7ebac6d804..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_variance.bin
deleted file mode 100644
index f3095ba68312b1217fd9defa267aebe650e0823a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_11_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_beta.bin
deleted file mode 100644
index 14c666c2b5a4e73385d33fdf5c170fca97fc4425..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_gamma.bin
deleted file mode 100644
index 79f025729a07e1beafdf276e79c68f4fff90c4d7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_mean.bin
deleted file mode 100644
index 85ae64046e267811e238c6e7deaaa038ce5e921c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_variance.bin
deleted file mode 100644
index 94df411b86ac3baef22e6a9a84f9dd3525bc628c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_12_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_beta.bin
deleted file mode 100644
index fe943abe2bd464fd36e6d6b370e2b891fb462dca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_gamma.bin
deleted file mode 100644
index b43f0af0c535c991c3c80d5be2f8e37c930d4ab1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_mean.bin
deleted file mode 100644
index 8271b08f2d37294ae1a84af55728fc0b3cf874b7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_variance.bin
deleted file mode 100644
index c68118085bcf02c38b5633d84e6ea6d12cd144e7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_13_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_beta.bin
deleted file mode 100644
index 4507e0cdd399dd91cb1f339804fecf8d3943a363..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_gamma.bin
deleted file mode 100644
index 264a6ea84b68e7cdc10f2f5a07595b4f8fef88b7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_mean.bin
deleted file mode 100644
index 63861f220b036a39025a7d5c09cd317bcee38aaf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_variance.bin
deleted file mode 100644
index 0b2a6ffbfd5ff1f2896a3fc354d66a4f5fdc0e3d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_14_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_beta.bin
deleted file mode 100644
index c2eab10287b66fdd957cf8e8e3796524630b4f36..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_gamma.bin
deleted file mode 100644
index eff5a67a01ae4943e72239339519dcdd65daca69..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_mean.bin
deleted file mode 100644
index ad022601f392144b6db0c93d42c53036a51d62ea..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_variance.bin
deleted file mode 100644
index 0dc43c2edc2791af5ede8cb0020dd1695a91aa62..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_15_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_beta.bin
deleted file mode 100644
index 1351487c8f1055e65182292327403bbf2203bc1a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_gamma.bin
deleted file mode 100644
index 0a7830129923f60c08a2425f26cf0aa0a21ae9c7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_mean.bin
deleted file mode 100644
index 961a3795a8a048ebc37dcc33b4d00edb07f173f9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_variance.bin
deleted file mode 100644
index 5d82ab87ad0fa7481c7a7f2c72eea606c9e297e1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_16_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_beta.bin
deleted file mode 100644
index 1ffa5f2fd9e6a7da9bb08264d1ec493a16687e2f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_gamma.bin
deleted file mode 100644
index 5473a0585098b23a92aa025dc559e4aad9f34848..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_mean.bin
deleted file mode 100644
index a3a5a6b44439a7eb1eb66ba1ef5786e8befdf0fd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_variance.bin
deleted file mode 100644
index cbb0a0ea9d201c1a2b99308654a770c000d67905..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_17_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_beta.bin
deleted file mode 100644
index 98544880967f9abef571c70ac9691a1b49227870..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_gamma.bin
deleted file mode 100644
index 342c837b527974f25e5e38739dcf24fd78da9a55..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_mean.bin
deleted file mode 100644
index f637c013cd4a3fd2fee90b5917262e8bcde59832..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_variance.bin
deleted file mode 100644
index 850bb67abc65d8daa297656f01165faa70065027..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_18_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_beta.bin
deleted file mode 100644
index 61b705b1ec1e21dd4c2621b0e95c3c7dcc87a1d0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_gamma.bin
deleted file mode 100644
index beee181086583ea4f6fc65be8b73da213b8ba370..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_mean.bin
deleted file mode 100644
index 93c8a68d3267c60ca0c48e951675012431242113..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_variance.bin
deleted file mode 100644
index b93c15a26a6c524103febd11359b0357c188fb6a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_19_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_beta.bin
deleted file mode 100644
index f60d05e199a34166948e4e8da7f012ea263bd4b5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_gamma.bin
deleted file mode 100644
index 09f172948b4569f147fa7f73ad33b681817f251c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_mean.bin
deleted file mode 100644
index 6c28e9fd4badced3c970eed1240755849cf27d03..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_variance.bin
deleted file mode 100644
index 9a834ed98e2ce51c2765a4d4a6e5376d26412145..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_1_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_beta.bin
deleted file mode 100644
index d98aa4aff62e6f176037638448641e4861134d87..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_gamma.bin
deleted file mode 100644
index 7d6bdb0e951ec51fd446086f80d67da26d9353f6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_mean.bin
deleted file mode 100644
index 0cb3c55fd86a0d5c1bed8fb6342b5339c3fce17f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_variance.bin
deleted file mode 100644
index fcfe594541eac05c1a876b6620dafb3d93bff777..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_20_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_beta.bin
deleted file mode 100644
index 329cc68c23062f9127754de7972c711a51346145..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_gamma.bin
deleted file mode 100644
index e17b4586794876602de97031481b88e42372220e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_mean.bin
deleted file mode 100644
index 8c7a0c17605c76ce98966181068205173a1b268b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_variance.bin
deleted file mode 100644
index 805df91c2c69dbe45d70cecac6519912ec7d46d2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_21_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_beta.bin
deleted file mode 100644
index 6bbadec8609ba3a1a359b71cd06081b6f452e566..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_gamma.bin
deleted file mode 100644
index 7771e7addf6f95fcf1f458057fccf8dbbfafe63e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_mean.bin
deleted file mode 100644
index 9d0651fad052b818f2a4e82d419a0e25873544e7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_variance.bin
deleted file mode 100644
index a123879d49448a266946d9cfb81dddd5e9664532..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_22_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_beta.bin
deleted file mode 100644
index 03566ebfd9ac4c8a27e336c99f90e66b340ed6ed..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_gamma.bin
deleted file mode 100644
index 5756dbf05c49d21b6ded2ff30d9ad6afff5842db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_mean.bin
deleted file mode 100644
index cb1f72bc5404d32bc7f7ab1d138b3149f943cdf3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_variance.bin
deleted file mode 100644
index 09bf21f57f8855702bdb66227aef18d57ff20a1f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_23_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_beta.bin
deleted file mode 100644
index abf3f546b786c5b88759dd8a5c5acc13fec9ceaf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_gamma.bin
deleted file mode 100644
index a2054b5a05e2d518bcf5bbc92e91ef775bad0c09..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_mean.bin
deleted file mode 100644
index f362fc99cd0b0fb10f571542bace52c9aada8fd9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_variance.bin
deleted file mode 100644
index a0dd7cc7d0edee743541f540087b19746c91cb75..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_24_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_beta.bin
deleted file mode 100644
index bc3b11316999b97450635690967614580e17e63a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_gamma.bin
deleted file mode 100644
index 546853527d305701a49b95c0d1d07b83b9d7e915..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_mean.bin
deleted file mode 100644
index 283cf683e629ee163f29388d5566fa313e6c0cda..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_variance.bin
deleted file mode 100644
index dd8310a36139140150454c19ad46707a50d0cc4f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_25_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_beta.bin
deleted file mode 100644
index b22e90a678237f35f3bf3f1ec2a5b8948739f8ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_gamma.bin
deleted file mode 100644
index 360fa6fa170521a527a3a845945636677bfe67ab..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_mean.bin
deleted file mode 100644
index 2915ad503f80b9a30df598acf12202d40e8cdc39..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_variance.bin
deleted file mode 100644
index 279c8cbbc7b1ad547239ab9babec1650766eb2c7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_26_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_beta.bin
deleted file mode 100644
index 227567918002692b4c3df1064e255f20aedf850f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_gamma.bin
deleted file mode 100644
index dc8863f5a4e222b85ef2f3e98abe5274da0390df..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_mean.bin
deleted file mode 100644
index 90c3d5578cdabd2bc87a3881b0594a2dc5b475da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_variance.bin
deleted file mode 100644
index 938c9ad2f6da8898c9a1122d200002b0bf08a40a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_27_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_beta.bin
deleted file mode 100644
index 89f381c5cd85bb0ea50065763cd3878c462ef35e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_gamma.bin
deleted file mode 100644
index 461db2221ab73ad6c97eab693e087a757828b106..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_mean.bin
deleted file mode 100644
index ea2b437e0342456db701294a963a575250984f03..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_variance.bin
deleted file mode 100644
index 2238f7e59c6f00a2a86716482996e9966caa8340..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_28_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_beta.bin
deleted file mode 100644
index 73c10e3e514d1e5a0ed832e797ae9be842dfe98a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_gamma.bin
deleted file mode 100644
index 8bad98200a1e1667dda6f9b80fbd75aeb20b221b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_mean.bin
deleted file mode 100644
index 73b9f6f171fd854068e9fb739d09ad5f14b32537..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_variance.bin
deleted file mode 100644
index 6c46e97555114e4a4950ec3a2eb318956a9646c4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_29_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_beta.bin
deleted file mode 100644
index 82272b5b4fd7c5ab38f8d806cdbae17c2605eee0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_gamma.bin
deleted file mode 100644
index cc700aad14079e9b2638dbd265c4a9daa518358d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_mean.bin
deleted file mode 100644
index ce8c1d869e4d333bd1027bb1a372d2eebc866e65..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_variance.bin
deleted file mode 100644
index 9e40a999b5566e533be64b33e61ae32999b3e6b5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_2_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_beta.bin
deleted file mode 100644
index 6c5e9c782a730cb12398cbbd0b26b1041ba63e34..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_gamma.bin
deleted file mode 100644
index e6707e194c30d59be2a45426a20c0879e3d7549e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_mean.bin
deleted file mode 100644
index 16989fbce94611fb10ce20e3056220332bdbd4df..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_variance.bin
deleted file mode 100644
index bb358f7dc83f2b76ccda59d5241051c425215afb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_30_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_beta.bin
deleted file mode 100644
index 6750dfad3835ac171aff2b04e8e4f661d2ae4a30..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_gamma.bin
deleted file mode 100644
index 2664b2a76a1d08f38973ace8cc57f0c697246cad..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_mean.bin
deleted file mode 100644
index ee6ade79cf4f7a8b7a87431f45ccdf787536b3b1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_variance.bin
deleted file mode 100644
index f3e51d6709ae085f9c8770a1bcde97091e3f4d56..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_31_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_beta.bin
deleted file mode 100644
index 7cb7dd94f9432febd4637abea098e8f8afe3c4d9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_gamma.bin
deleted file mode 100644
index e1ae5d5d41556829fe583e3a3c9642856dda1aec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_mean.bin
deleted file mode 100644
index 31156781f97be0f1dddbbb13d8b963635d72a7ac..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_variance.bin
deleted file mode 100644
index f5b17cda2bbd12feeb2783e1b59d63a588abb84d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_32_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_beta.bin
deleted file mode 100644
index 2accb0decdc215b50a4f15cae2c94391fcf32313..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_gamma.bin
deleted file mode 100644
index 623399dabe5d0fc9dc3740a0e5e17b06eed33011..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_mean.bin
deleted file mode 100644
index afc71dee70fd20b5f5182bfa42497372aa1af3ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_variance.bin
deleted file mode 100644
index 66c91d3f1614bc6208ee29330e1f4c36450ce29f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_33_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_beta.bin
deleted file mode 100644
index 3d23f939d36241b9e2d175aef1bf40209cf74cb1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_gamma.bin
deleted file mode 100644
index 91ccafb53284789ca613395afbb77f00fcba208c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_mean.bin
deleted file mode 100644
index 2fa65a541a89603287a3f35b8be2e3faf23841b1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_variance.bin
deleted file mode 100644
index a7cde4bb591ed19c6658ab2414231d39bd285ab6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_34_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_beta.bin
deleted file mode 100644
index 775868b3d4cdc1e557034907bc91b1a09b611f83..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_gamma.bin
deleted file mode 100644
index 5a03cc9e000af0b6b1bc51a4f34e669c61916a61..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_mean.bin
deleted file mode 100644
index 7e832b64912c762e4f60a1b83236ac38e76985c6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_variance.bin
deleted file mode 100644
index 82d53446f5cc8483e4840eb54be6088a621eb5ef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_35_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_beta.bin
deleted file mode 100644
index 3b76b42665b70b973ea007431b8b7c463dc31734..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_gamma.bin
deleted file mode 100644
index 3ba871074e61f4b683b65d79998752574247aefb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_mean.bin
deleted file mode 100644
index a26149263ba9092db4ec781a1ae2ba3b82fa88ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_variance.bin
deleted file mode 100644
index 064303a9f9abb71448cf98cf6a0489eb2d6f7353..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_36_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_beta.bin
deleted file mode 100644
index 3c376be994d0f009088015c90242831a445ee806..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_gamma.bin
deleted file mode 100644
index 39a3d46c1cede6ccca6c27522b9f5e5415aab8dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_mean.bin
deleted file mode 100644
index bc066c6deb4876726d2d678d11839582062e4dc6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_variance.bin
deleted file mode 100644
index a7692a6f9935bc6078c5a2d7a797d0c0035db984..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_37_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_beta.bin
deleted file mode 100644
index 986bfb6d373b264a033e3075c465bfce23220ccb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_gamma.bin
deleted file mode 100644
index 8a40b49b7b22dbc91e3511d39affb748c90c8024..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_mean.bin
deleted file mode 100644
index 8bfc6fd62514783edc8c355a732a0a58c5e93ba0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_variance.bin
deleted file mode 100644
index 65cc8769ed8d8d832c7d03d52425fa375273f4ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_38_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_beta.bin
deleted file mode 100644
index fa82341689406b9def124ee340b15b78b62177e0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_gamma.bin
deleted file mode 100644
index f82fea8b9d38fdb957aa81af97506200dbf4f412..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_mean.bin
deleted file mode 100644
index 1976868d2a4bc6e47c2e6027baab56bdbd3e8070..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_variance.bin
deleted file mode 100644
index 55eaa9be56c085b80c2986342a30b3e3c129ce37..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_39_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_beta.bin
deleted file mode 100644
index 2e175064045accb1cb4b5da11b9233e0c64aebfd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_gamma.bin
deleted file mode 100644
index 699e6583296ea26f3d5fe5a148a65c51d69aec91..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_mean.bin
deleted file mode 100644
index 5cd12392e3e4bc7a9bf543d7e9eaef52f8df9de1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_variance.bin
deleted file mode 100644
index ffc045b4267bf0f8eb71a2e92429faeeed2c4a5a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_3_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_beta.bin
deleted file mode 100644
index c4e58b81c432592e616429a320b62709a7170425..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_gamma.bin
deleted file mode 100644
index 52224cb122d615058db1a15195a56a9056755a23..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_mean.bin
deleted file mode 100644
index b919137bd4eca83a31012956d17a1727c6e4e1bb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_variance.bin
deleted file mode 100644
index b8dabb2676bd0d86c216a6f5bea25b6538e6602f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_40_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_beta.bin
deleted file mode 100644
index e27aa81dd5f4a3b62042cbb4092b84fa4a334751..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_gamma.bin
deleted file mode 100644
index 52d117edbfd1a11fb781af84e706c5a65b123779..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_mean.bin
deleted file mode 100644
index 30dd5805aac1fbe97bf05ae3b7f99f0acf7dc165..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_variance.bin
deleted file mode 100644
index 148f803195691176c3f784620d739e93f7a2c490..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_41_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_beta.bin
deleted file mode 100644
index 427aa5255f92bb36ffade408eb280209560e46b9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_gamma.bin
deleted file mode 100644
index c0f984b6310cee45181bfa137e9b1b816af595da..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_mean.bin
deleted file mode 100644
index f0b4cfaaa4cc2f8f2587c1d109ac8ee1adbead1d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_variance.bin
deleted file mode 100644
index 977095d0ccc6ff0f30895995e13e9356daa2fd2f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_42_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_beta.bin
deleted file mode 100644
index 12419899878a4cc04ee09e0da622447afbdf36ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_gamma.bin
deleted file mode 100644
index 17eddfc1a66c14359305c485325f00f2788a0471..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_mean.bin
deleted file mode 100644
index 090eea9085197ec7e1396df6fdb8ad2c0a53e669..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_variance.bin
deleted file mode 100644
index 1971785504ab40839de88b0b930517714cedd51a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_43_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_beta.bin
deleted file mode 100644
index 235264d59b8cd8acd0d1023b59cac858ea9d2c8b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_gamma.bin
deleted file mode 100644
index 3c84efa5834f00b5ba46aa995b205a75d849ff2b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_mean.bin
deleted file mode 100644
index 7e0afe909fd90dcbe8b9e7a44731afdea3b09ab1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_variance.bin
deleted file mode 100644
index 0e7e4c0abbc38e80decdb2314878ea58f11927db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_44_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_beta.bin
deleted file mode 100644
index 8573727bf0a6552bb4a1fe1d98ba25b0ab690dff..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_gamma.bin
deleted file mode 100644
index c9c0ddfa16063c0dd20a5bfe46de359ee661fa8f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_mean.bin
deleted file mode 100644
index fd6d5a2bab381a736e15502e7df859de657a97cf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_variance.bin
deleted file mode 100644
index 250f5f681e9ac852ac3d936d998ec8fb3ca9588c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_45_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_beta.bin
deleted file mode 100644
index 15744647923460e7298ac8c19c1912375537ab9f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_gamma.bin
deleted file mode 100644
index 927e00c5aa50534ebeaa26ee97701f46d420b2e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_mean.bin
deleted file mode 100644
index 0bdd42ac5040c31ef2e452cf24eaa215e4a64280..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_variance.bin
deleted file mode 100644
index 5ebeb420c54e1582a3d66d3b56ec17a6e97868af..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_46_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_beta.bin
deleted file mode 100644
index 3ff458e89574786d77a57b7924962b88facde3fd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_gamma.bin
deleted file mode 100644
index a8c51a11452611c923033e13e9c30f8c4587c35e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_mean.bin
deleted file mode 100644
index ed895e5d6b0664f47b464acff3ae44c27d943718..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_variance.bin
deleted file mode 100644
index ce6e5a5b6f8774a82d502b698fc27598ee4c598b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_47_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_beta.bin
deleted file mode 100644
index cf7623e0fefc9e02bc4c5bae57f775d772ce002c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_gamma.bin
deleted file mode 100644
index 55a3f0c3dc3a63f5b4cf0404dc98975ad65aa7f8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_mean.bin
deleted file mode 100644
index 1960ef752cc0f7d8a3589feb18dab78e5ae313ce..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_variance.bin
deleted file mode 100644
index 491851edbf60ec5d8cc180b0c445d188c29a6f24..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_48_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_beta.bin
deleted file mode 100644
index 0ed7df335852b49e79dcdb73ad36e22d76cd56cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_gamma.bin
deleted file mode 100644
index 7078b3357c512b1ff522b87ad0d370ae9195d00a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_mean.bin
deleted file mode 100644
index 14bbe1e962422c7d7b527e5a2e06c827c38d4c78..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_variance.bin
deleted file mode 100644
index 92f40a4faf3b478306d2fb9f593cc2abab6da646..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_49_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_beta.bin
deleted file mode 100644
index 772f11d60bd27a9172a62667a05c101ae945a885..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_gamma.bin
deleted file mode 100644
index 9ee07b797a2b776351affd0aec0b5c61b9d241b2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_mean.bin
deleted file mode 100644
index 349cf081eccc40cc5c20cedf9696433e689facfa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_variance.bin
deleted file mode 100644
index c65ebc7dead439700cb4dae8e286dd6c4c3825f8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_4_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_beta.bin
deleted file mode 100644
index 9ef7b1a32e874afae65edd6633728b542eeef754..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_gamma.bin
deleted file mode 100644
index 27509f7e3883aa279828a9edde929dbfb59d8e38..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_mean.bin
deleted file mode 100644
index 7f34371f648dbafb0f1026725eb4858684bb2944..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_variance.bin
deleted file mode 100644
index cf3ed0cdf434a43814cfe7f0d678a60d9203fb39..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_50_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_beta.bin
deleted file mode 100644
index 972708ac17a46415a19518bd79d5504505bffdf5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_gamma.bin
deleted file mode 100644
index 7e483f257d334f877ec04eca720e590c27d34e61..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_mean.bin
deleted file mode 100644
index b72d846c086e394c568efc7d16b3319f859952f8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_variance.bin
deleted file mode 100644
index cc6e1e8f68204e24085eecc04faa140f70bca878..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_51_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_beta.bin
deleted file mode 100644
index a6c775acc915c85bb2254d9cf3080fcbe8a6a877..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_gamma.bin
deleted file mode 100644
index 36cff8e345b911d4bc18460d8aab1e09fbaa2cdb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_mean.bin
deleted file mode 100644
index 7110d5c9eb090f11889dcab5177432db76e2d4c3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_variance.bin
deleted file mode 100644
index 03272b5b45a9524b50f9e064214f9327261b1295..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_5_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_beta.bin
deleted file mode 100644
index 74474b6d49c7f55b76cbe3845e6fb5967c1dddcc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_gamma.bin
deleted file mode 100644
index 40a5c2db6eebf96469c002b60fb5907b2fecb9c0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_mean.bin
deleted file mode 100644
index 80d18385c8c30e77d09a80726eb33b869b07e217..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_variance.bin
deleted file mode 100644
index 3a480219e0b77c94678bec0570ee6fa96dee27e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_6_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_beta.bin
deleted file mode 100644
index d5d48b15f38a3c8b7325ec1ada6d149ede1f2e0c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_gamma.bin
deleted file mode 100644
index 55bf19339044606316f77b2d0d54622641085d89..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_mean.bin
deleted file mode 100644
index c594093453cd2e010818ef71f1576e8542e55776..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_variance.bin
deleted file mode 100644
index ab7a15d9a03f4c93b65b65c2f3dd1b9db5750bb1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_7_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_beta.bin
deleted file mode 100644
index 45f92915c32a2d651164f6a1e68f2c46196c9e5d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_gamma.bin
deleted file mode 100644
index 4bbd31c1c6daf904acdf4fee9f7d900677122ab3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_mean.bin
deleted file mode 100644
index 98327dea0be901b1a27f081ab7d9b39fdaeb99cb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_variance.bin
deleted file mode 100644
index db76ea36d6fc38dd6d0f9c39c22096527ee83712..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_8_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_beta.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_beta.bin
deleted file mode 100644
index 105ec5c4710d3f6e850adc2306598acfdcb7f73b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_beta.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_gamma.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_gamma.bin
deleted file mode 100644
index 95462f7dc4d0041ef2f472538ff2d3cf4aa848e9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_gamma.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_mean.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_mean.bin
deleted file mode 100644
index dc33f219b4f2d06f69371d7fa4f9b3ef761b410a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_mean.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_variance.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_variance.bin
deleted file mode 100644
index 0faecea8d8d19d1ac3e1f4c480003e030141e69e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/batch_normalization_9_variance.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_10_w.bin
deleted file mode 100644
index dc0a93e481da56be867f0eb1c00da51e17e8e141..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_11_w.bin
deleted file mode 100644
index 2ed96f21e7eb16d5b09f452db021007e7f90d25e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_12_w.bin
deleted file mode 100644
index 8f25d49f21ff8dca574d850c3e861c6f6a47b16c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_13_w.bin
deleted file mode 100644
index f2f017d79f24ee229036f5a3c1fcb5a26034c3eb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_14_w.bin
deleted file mode 100644
index 61d258cf8e8b6bdbd82992ee25bbed510091b2ef..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_15_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_15_w.bin
deleted file mode 100644
index 4c0664bf19cc0276988667f5947d291065d90185..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_15_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_16_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_16_w.bin
deleted file mode 100644
index c31771ed6e281fc3bae615f8215e917d400f452a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_16_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_17_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_17_w.bin
deleted file mode 100644
index 375291ea7bf75703223fb1690c3b169a32d3db40..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_17_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_18_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_18_w.bin
deleted file mode 100644
index 3a55e73daa0da8278342efcbe49d253a73a97fbb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_18_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_19_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_19_w.bin
deleted file mode 100644
index f022c71f53cf175ce857c0309aa9753a09fb8f6d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_19_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_1_w.bin
deleted file mode 100644
index e451d8e2f0e097fe75b1c7ed43cc418272a8832d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_20_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_20_w.bin
deleted file mode 100644
index bd046934f95a9e3aaca6ce9c80b8a1555c61207f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_20_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_21_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_21_w.bin
deleted file mode 100644
index 5ebb453e22012f2544259ac60efdda1fb0a4aac7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_21_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_22_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_22_w.bin
deleted file mode 100644
index c03fcd4fdc31fc5708ce57d4571e00fb9bcd61e8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_22_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_23_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_23_w.bin
deleted file mode 100644
index 556ac89de9d048f5579be4aa1a8ca8174444dccf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_23_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_24_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_24_w.bin
deleted file mode 100644
index daa925a84b6e8ebdeebb921dcf0341be53c102d9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_24_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_25_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_25_w.bin
deleted file mode 100644
index 1884d43203c132d359a422abe7d316ca22fdf579..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_25_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_26_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_26_w.bin
deleted file mode 100644
index e86178e4a6d081612580177421183669e7c296ea..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_26_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_27_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_27_w.bin
deleted file mode 100644
index 9d5a302ba7c8fbfe63c8bf200e913100de38977f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_27_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_28_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_28_w.bin
deleted file mode 100644
index e83e46ea30c7803c096e26ccdc96ae06ac989997..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_28_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_29_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_29_w.bin
deleted file mode 100644
index 3c554ae0a8b137d9260cf6f028f163da02446cd1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_29_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_2_w.bin
deleted file mode 100644
index 7fbbe865087bfeff67d1b3a5950b5733f0bf86dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_30_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_30_w.bin
deleted file mode 100644
index 5cc75041cb13308743030690cebba25f3ec1bf9a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_30_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_31_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_31_w.bin
deleted file mode 100644
index fe50ba933ac39914bd9747a521c47ed1b23bb680..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_31_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_32_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_32_w.bin
deleted file mode 100644
index 7d3d00dd5c9f09a1383df26228388845397fe1a3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_32_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_33_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_33_w.bin
deleted file mode 100644
index 5689e07639cc334a48f0ff2782f07be21bb1d3cf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_33_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_34_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_34_w.bin
deleted file mode 100644
index 87cd912048857fa22bf9eef76945bb997e085d04..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_34_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_35_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_35_w.bin
deleted file mode 100644
index b9a2523d4aa279c3384bb5c865beb8ce1889875b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_35_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_3_w.bin
deleted file mode 100644
index fb044e2d3b542a3db5ba0b03c0155f775e9a5def..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_4_w.bin
deleted file mode 100644
index 919f1601c0199f9aeb507ed5093258d32ef4c634..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_5_w.bin
deleted file mode 100644
index 096598438c4752ea7de0ad7d8e10916ba56b7666..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_6_w.bin
deleted file mode 100644
index 569ec0fc554868db0843668094948d25074f278e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_7_w.bin
deleted file mode 100644
index 7a241bee7f98e35c1642c99eacead16527a94a12..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_8_w.bin
deleted file mode 100644
index cbf4a3f50e8528844a03070be34ee626e647df71..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_9_w.bin
deleted file mode 100644
index 8346f227559c8c235d9d975b0847d6e4c96f5b2b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_b.bin
deleted file mode 100644
index dc012aa4ebb21937ddc9e5f7720f5164aba95ab6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-~ûÀ¼+,!¾·g½K@n>“±w=aÍ<ŸX<îW;ó5‘½Ì´¼
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_w.bin
deleted file mode 100644
index af593b5bcd5299d1a64a1c724f3e75d14fc79d52..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_10_w.bin
deleted file mode 100644
index de4e3e8963f94350cf544fc0a724cc50b16ef70a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_11_w.bin
deleted file mode 100644
index 02c84c82b59db3e63ba78facef040101ce318d2e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_12_w.bin
deleted file mode 100644
index 93dcf1124fc6f9af8269bab52bb5ae4971fde099..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_13_w.bin
deleted file mode 100644
index 2af29e1c14ec7db85fb0c6e1d40e8c4c342efc3e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_14_w.bin
deleted file mode 100644
index 7ad2f4799b75990a1b76f8a3199cac84e0488392..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_15_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_15_w.bin
deleted file mode 100644
index 2c34a803008a6674581866931ccb67285a7edf70..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_15_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_16_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_16_w.bin
deleted file mode 100644
index eb19459284ac01b8a8184ac98769d9ad495f3649..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_16_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_17_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_17_w.bin
deleted file mode 100644
index 114a8f1516f9ba19f5f9bb5ee54229ed88ed6695..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_17_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_1_w.bin
deleted file mode 100644
index a2bf4210e56a94ccab5a1d0ddddac5d0b5f1da97..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_2_w.bin
deleted file mode 100644
index b3b03e8fd5fe5308a9bac47237583236c27db3c8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_3_w.bin
deleted file mode 100644
index 4f78354458841040c546e331462d207cc1905389..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_4_w.bin
deleted file mode 100644
index fef63faeee496fad32f6e0ed9f7ae4a1f1937071..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_5_w.bin
deleted file mode 100644
index 07e6c4a0a530bc058a98c9d5733a81f837f74b22..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_6_w.bin
deleted file mode 100644
index 669641199919240ba349e6f4ffce2ebc52b8ee3b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_7_w.bin
deleted file mode 100644
index e49bcb1bb5ac8fbf238f6c6c9acd08327aa83f8b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_8_w.bin
deleted file mode 100644
index 2072437cf5fcceeefe8a37f04375e6fdd6e9bb7f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_9_w.bin
deleted file mode 100644
index 9ca3d818c2669e612ff48911b736b81ea5097f1a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/depthwise_conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/input.bin
deleted file mode 100644
index 21cd046466ec09d3b7c22d11331e4f2bf2dec611..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layer_composition.txt
deleted file mode 100644
index 64209b6c7b5837927d74063cce52204d34b77812..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layer_composition.txt
+++ /dev/null
@@ -1,149 +0,0 @@
-conv  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-add  
-conv  
-
-activation  
-
-
-activation  
-conv  
-
-conv  
-
-activation  
-pool  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layers.txt
deleted file mode 100644
index 7d91a01bb6bbff41e6ce77a28368502090bb65b7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/layers.txt
+++ /dev/null
@@ -1,149 +0,0 @@
-Conv1,10000,3,32,32,32,3,3,3
-#tensorDepthwiseConv1
-#tensorBatchNorm1
-#tensorRelu1
-Conv2,10000,32,32,32,16,32,1,1
-#tensorBatchNorm2
-Conv3,10000,16,32,32,96,16,1,1
-#tensorBatchNorm3
-#tensorRelu2
-#tensorDepthwiseConv2
-#tensorBatchNorm4
-#tensorRelu3
-Conv4,10000,96,32,32,24,96,1,1
-#tensorBatchNorm5
-Conv5,10000,24,32,32,144,24,1,1
-#tensorBatchNorm6
-#tensorRelu4
-#tensorDepthwiseConv3
-#tensorBatchNorm7
-#tensorRelu5
-Conv6,10000,144,32,32,24,144,1,1
-#tensorBatchNorm8
-#tensorAdd1
-Conv7,10000,24,32,32,144,24,1,1
-#tensorBatchNorm9
-#tensorRelu6
-#tensorDepthwiseConv4
-#tensorBatchNorm10
-#tensorRelu7
-Conv8,10000,144,16,16,32,144,1,1
-#tensorBatchNorm11
-Conv9,10000,32,16,16,192,32,1,1
-#tensorBatchNorm12
-#tensorRelu8
-#tensorDepthwiseConv5
-#tensorBatchNorm13
-#tensorRelu9
-Conv10,10000,192,16,16,32,192,1,1
-#tensorBatchNorm14
-#tensorAdd2
-Conv11,10000,32,16,16,192,32,1,1
-#tensorBatchNorm15
-#tensorRelu10
-#tensorDepthwiseConv6
-#tensorBatchNorm16
-#tensorRelu11
-Conv12,10000,192,16,16,32,192,1,1
-#tensorBatchNorm17
-#tensorAdd3
-Conv13,10000,32,16,16,192,32,1,1
-#tensorBatchNorm18
-#tensorRelu12
-#tensorDepthwiseConv7
-#tensorBatchNorm19
-#tensorRelu13
-Conv14,10000,192,8,8,64,192,1,1
-#tensorBatchNorm20
-Conv15,10000,64,8,8,384,64,1,1
-#tensorBatchNorm21
-#tensorRelu14
-#tensorDepthwiseConv8
-#tensorBatchNorm22
-#tensorRelu15
-Conv16,10000,384,8,8,64,384,1,1
-#tensorBatchNorm23
-#tensorAdd4
-Conv17,10000,64,8,8,384,64,1,1
-#tensorBatchNorm24
-#tensorRelu16
-#tensorDepthwiseConv9
-#tensorBatchNorm25
-#tensorRelu17
-Conv18,10000,384,8,8,64,384,1,1
-#tensorBatchNorm26
-#tensorAdd5
-Conv19,10000,64,8,8,384,64,1,1
-#tensorBatchNorm27
-#tensorRelu18
-#tensorDepthwiseConv10
-#tensorBatchNorm28
-#tensorRelu19
-Conv20,10000,384,8,8,64,384,1,1
-#tensorBatchNorm29
-#tensorAdd6
-Conv21,10000,64,8,8,384,64,1,1
-#tensorBatchNorm30
-#tensorRelu20
-#tensorDepthwiseConv11
-#tensorBatchNorm31
-#tensorRelu21
-Conv22,10000,384,8,8,96,384,1,1
-#tensorBatchNorm32
-Conv23,10000,96,8,8,576,96,1,1
-#tensorBatchNorm33
-#tensorRelu22
-#tensorDepthwiseConv12
-#tensorBatchNorm34
-#tensorRelu23
-Conv24,10000,576,8,8,96,576,1,1
-#tensorBatchNorm35
-#tensorAdd7
-Conv25,10000,96,8,8,576,96,1,1
-#tensorBatchNorm36
-#tensorRelu24
-#tensorDepthwiseConv13
-#tensorBatchNorm37
-#tensorRelu25
-Conv26,10000,576,8,8,96,576,1,1
-#tensorBatchNorm38
-#tensorAdd8
-Conv27,10000,96,8,8,576,96,1,1
-#tensorBatchNorm39
-#tensorRelu26
-#tensorDepthwiseConv14
-#tensorBatchNorm40
-#tensorRelu27
-Conv28,10000,576,4,4,160,576,1,1
-#tensorBatchNorm41
-Conv29,10000,160,4,4,960,160,1,1
-#tensorBatchNorm42
-#tensorRelu28
-#tensorDepthwiseConv15
-#tensorBatchNorm43
-#tensorRelu29
-Conv30,10000,960,4,4,160,960,1,1
-#tensorBatchNorm44
-#tensorAdd9
-Conv31,10000,160,4,4,960,160,1,1
-#tensorBatchNorm45
-#tensorRelu30
-#tensorDepthwiseConv16
-#tensorBatchNorm46
-#tensorRelu31
-Conv32,10000,960,4,4,160,960,1,1
-#tensorBatchNorm47
-#tensorAdd10
-Conv33,10000,160,4,4,960,160,1,1
-#tensorBatchNorm48
-#tensorRelu32
-#tensorDepthwiseConv17
-#tensorBatchNorm49
-#tensorRelu33
-Conv34,10000,960,4,4,320,960,1,1
-#tensorBatchNorm50
-Conv35,10000,320,4,4,1280,320,1,1
-#tensorBatchNorm51
-#tensorRelu34
-#tensorPooling1
-FC1,10000,5120,5120,10
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/promise_src.cc
deleted file mode 100644
index 430b8afddf640fe3ff251a734ac315e430947618..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/promise_src.cc
+++ /dev/null
@@ -1,726 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(1); 
-
-int total_runs = 1; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 2000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-std::string dir_prefix = std::string("../../keras/data/mobilenetv2_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,32,1,1); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,96,16,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,96,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,96,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,24,96,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,24,144,1,1); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,144,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,64,192,1,1); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_28_gamma_path =  dir_prefix + std::string("batch_normalization_28_gamma.bin"); 
-void* batch_normalization_28_gamma =  readTrainedWeights(batch_normalization_28_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_beta_path =  dir_prefix + std::string("batch_normalization_28_beta.bin"); 
-void* batch_normalization_28_beta =  readTrainedWeights(batch_normalization_28_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_mean_path =  dir_prefix + std::string("batch_normalization_28_mean.bin"); 
-void* batch_normalization_28_mean =  readTrainedWeights(batch_normalization_28_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_variance_path =  dir_prefix + std::string("batch_normalization_28_variance.bin"); 
-void* batch_normalization_28_variance =  readTrainedWeights(batch_normalization_28_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_29_gamma_path =  dir_prefix + std::string("batch_normalization_29_gamma.bin"); 
-void* batch_normalization_29_gamma =  readTrainedWeights(batch_normalization_29_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_beta_path =  dir_prefix + std::string("batch_normalization_29_beta.bin"); 
-void* batch_normalization_29_beta =  readTrainedWeights(batch_normalization_29_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_mean_path =  dir_prefix + std::string("batch_normalization_29_mean.bin"); 
-void* batch_normalization_29_mean =  readTrainedWeights(batch_normalization_29_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_variance_path =  dir_prefix + std::string("batch_normalization_29_variance.bin"); 
-void* batch_normalization_29_variance =  readTrainedWeights(batch_normalization_29_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_30_gamma_path =  dir_prefix + std::string("batch_normalization_30_gamma.bin"); 
-void* batch_normalization_30_gamma =  readTrainedWeights(batch_normalization_30_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_beta_path =  dir_prefix + std::string("batch_normalization_30_beta.bin"); 
-void* batch_normalization_30_beta =  readTrainedWeights(batch_normalization_30_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_mean_path =  dir_prefix + std::string("batch_normalization_30_mean.bin"); 
-void* batch_normalization_30_mean =  readTrainedWeights(batch_normalization_30_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_variance_path =  dir_prefix + std::string("batch_normalization_30_variance.bin"); 
-void* batch_normalization_30_variance =  readTrainedWeights(batch_normalization_30_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_31_gamma_path =  dir_prefix + std::string("batch_normalization_31_gamma.bin"); 
-void* batch_normalization_31_gamma =  readTrainedWeights(batch_normalization_31_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_beta_path =  dir_prefix + std::string("batch_normalization_31_beta.bin"); 
-void* batch_normalization_31_beta =  readTrainedWeights(batch_normalization_31_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_mean_path =  dir_prefix + std::string("batch_normalization_31_mean.bin"); 
-void* batch_normalization_31_mean =  readTrainedWeights(batch_normalization_31_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_variance_path =  dir_prefix + std::string("batch_normalization_31_variance.bin"); 
-void* batch_normalization_31_variance =  readTrainedWeights(batch_normalization_31_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_22_w_path =  dir_prefix + std::string("conv2d_22_w.bin"); 
-void* conv2d_22_w =  readTrainedWeights(conv2d_22_w_path.c_str(), 0,96,384,1,1); 
-std::string batch_normalization_32_gamma_path =  dir_prefix + std::string("batch_normalization_32_gamma.bin"); 
-void* batch_normalization_32_gamma =  readTrainedWeights(batch_normalization_32_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_beta_path =  dir_prefix + std::string("batch_normalization_32_beta.bin"); 
-void* batch_normalization_32_beta =  readTrainedWeights(batch_normalization_32_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_mean_path =  dir_prefix + std::string("batch_normalization_32_mean.bin"); 
-void* batch_normalization_32_mean =  readTrainedWeights(batch_normalization_32_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_variance_path =  dir_prefix + std::string("batch_normalization_32_variance.bin"); 
-void* batch_normalization_32_variance =  readTrainedWeights(batch_normalization_32_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_23_w_path =  dir_prefix + std::string("conv2d_23_w.bin"); 
-void* conv2d_23_w =  readTrainedWeights(conv2d_23_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_33_gamma_path =  dir_prefix + std::string("batch_normalization_33_gamma.bin"); 
-void* batch_normalization_33_gamma =  readTrainedWeights(batch_normalization_33_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_beta_path =  dir_prefix + std::string("batch_normalization_33_beta.bin"); 
-void* batch_normalization_33_beta =  readTrainedWeights(batch_normalization_33_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_mean_path =  dir_prefix + std::string("batch_normalization_33_mean.bin"); 
-void* batch_normalization_33_mean =  readTrainedWeights(batch_normalization_33_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_variance_path =  dir_prefix + std::string("batch_normalization_33_variance.bin"); 
-void* batch_normalization_33_variance =  readTrainedWeights(batch_normalization_33_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_34_gamma_path =  dir_prefix + std::string("batch_normalization_34_gamma.bin"); 
-void* batch_normalization_34_gamma =  readTrainedWeights(batch_normalization_34_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_beta_path =  dir_prefix + std::string("batch_normalization_34_beta.bin"); 
-void* batch_normalization_34_beta =  readTrainedWeights(batch_normalization_34_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_mean_path =  dir_prefix + std::string("batch_normalization_34_mean.bin"); 
-void* batch_normalization_34_mean =  readTrainedWeights(batch_normalization_34_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_variance_path =  dir_prefix + std::string("batch_normalization_34_variance.bin"); 
-void* batch_normalization_34_variance =  readTrainedWeights(batch_normalization_34_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_24_w_path =  dir_prefix + std::string("conv2d_24_w.bin"); 
-void* conv2d_24_w =  readTrainedWeights(conv2d_24_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_35_gamma_path =  dir_prefix + std::string("batch_normalization_35_gamma.bin"); 
-void* batch_normalization_35_gamma =  readTrainedWeights(batch_normalization_35_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_beta_path =  dir_prefix + std::string("batch_normalization_35_beta.bin"); 
-void* batch_normalization_35_beta =  readTrainedWeights(batch_normalization_35_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_mean_path =  dir_prefix + std::string("batch_normalization_35_mean.bin"); 
-void* batch_normalization_35_mean =  readTrainedWeights(batch_normalization_35_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_variance_path =  dir_prefix + std::string("batch_normalization_35_variance.bin"); 
-void* batch_normalization_35_variance =  readTrainedWeights(batch_normalization_35_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_25_w_path =  dir_prefix + std::string("conv2d_25_w.bin"); 
-void* conv2d_25_w =  readTrainedWeights(conv2d_25_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_36_gamma_path =  dir_prefix + std::string("batch_normalization_36_gamma.bin"); 
-void* batch_normalization_36_gamma =  readTrainedWeights(batch_normalization_36_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_beta_path =  dir_prefix + std::string("batch_normalization_36_beta.bin"); 
-void* batch_normalization_36_beta =  readTrainedWeights(batch_normalization_36_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_mean_path =  dir_prefix + std::string("batch_normalization_36_mean.bin"); 
-void* batch_normalization_36_mean =  readTrainedWeights(batch_normalization_36_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_variance_path =  dir_prefix + std::string("batch_normalization_36_variance.bin"); 
-void* batch_normalization_36_variance =  readTrainedWeights(batch_normalization_36_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_37_gamma_path =  dir_prefix + std::string("batch_normalization_37_gamma.bin"); 
-void* batch_normalization_37_gamma =  readTrainedWeights(batch_normalization_37_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_beta_path =  dir_prefix + std::string("batch_normalization_37_beta.bin"); 
-void* batch_normalization_37_beta =  readTrainedWeights(batch_normalization_37_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_mean_path =  dir_prefix + std::string("batch_normalization_37_mean.bin"); 
-void* batch_normalization_37_mean =  readTrainedWeights(batch_normalization_37_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_variance_path =  dir_prefix + std::string("batch_normalization_37_variance.bin"); 
-void* batch_normalization_37_variance =  readTrainedWeights(batch_normalization_37_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_26_w_path =  dir_prefix + std::string("conv2d_26_w.bin"); 
-void* conv2d_26_w =  readTrainedWeights(conv2d_26_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_38_gamma_path =  dir_prefix + std::string("batch_normalization_38_gamma.bin"); 
-void* batch_normalization_38_gamma =  readTrainedWeights(batch_normalization_38_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_beta_path =  dir_prefix + std::string("batch_normalization_38_beta.bin"); 
-void* batch_normalization_38_beta =  readTrainedWeights(batch_normalization_38_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_mean_path =  dir_prefix + std::string("batch_normalization_38_mean.bin"); 
-void* batch_normalization_38_mean =  readTrainedWeights(batch_normalization_38_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_variance_path =  dir_prefix + std::string("batch_normalization_38_variance.bin"); 
-void* batch_normalization_38_variance =  readTrainedWeights(batch_normalization_38_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_27_w_path =  dir_prefix + std::string("conv2d_27_w.bin"); 
-void* conv2d_27_w =  readTrainedWeights(conv2d_27_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_39_gamma_path =  dir_prefix + std::string("batch_normalization_39_gamma.bin"); 
-void* batch_normalization_39_gamma =  readTrainedWeights(batch_normalization_39_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_beta_path =  dir_prefix + std::string("batch_normalization_39_beta.bin"); 
-void* batch_normalization_39_beta =  readTrainedWeights(batch_normalization_39_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_mean_path =  dir_prefix + std::string("batch_normalization_39_mean.bin"); 
-void* batch_normalization_39_mean =  readTrainedWeights(batch_normalization_39_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_variance_path =  dir_prefix + std::string("batch_normalization_39_variance.bin"); 
-void* batch_normalization_39_variance =  readTrainedWeights(batch_normalization_39_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_14_w_path =  dir_prefix + std::string("depthwise_conv2d_14_w.bin"); 
-void* depthwise_conv2d_14_w =  readTrainedWeights(depthwise_conv2d_14_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_40_gamma_path =  dir_prefix + std::string("batch_normalization_40_gamma.bin"); 
-void* batch_normalization_40_gamma =  readTrainedWeights(batch_normalization_40_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_beta_path =  dir_prefix + std::string("batch_normalization_40_beta.bin"); 
-void* batch_normalization_40_beta =  readTrainedWeights(batch_normalization_40_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_mean_path =  dir_prefix + std::string("batch_normalization_40_mean.bin"); 
-void* batch_normalization_40_mean =  readTrainedWeights(batch_normalization_40_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_variance_path =  dir_prefix + std::string("batch_normalization_40_variance.bin"); 
-void* batch_normalization_40_variance =  readTrainedWeights(batch_normalization_40_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_28_w_path =  dir_prefix + std::string("conv2d_28_w.bin"); 
-void* conv2d_28_w =  readTrainedWeights(conv2d_28_w_path.c_str(), 0,160,576,1,1); 
-std::string batch_normalization_41_gamma_path =  dir_prefix + std::string("batch_normalization_41_gamma.bin"); 
-void* batch_normalization_41_gamma =  readTrainedWeights(batch_normalization_41_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_beta_path =  dir_prefix + std::string("batch_normalization_41_beta.bin"); 
-void* batch_normalization_41_beta =  readTrainedWeights(batch_normalization_41_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_mean_path =  dir_prefix + std::string("batch_normalization_41_mean.bin"); 
-void* batch_normalization_41_mean =  readTrainedWeights(batch_normalization_41_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_variance_path =  dir_prefix + std::string("batch_normalization_41_variance.bin"); 
-void* batch_normalization_41_variance =  readTrainedWeights(batch_normalization_41_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_29_w_path =  dir_prefix + std::string("conv2d_29_w.bin"); 
-void* conv2d_29_w =  readTrainedWeights(conv2d_29_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_42_gamma_path =  dir_prefix + std::string("batch_normalization_42_gamma.bin"); 
-void* batch_normalization_42_gamma =  readTrainedWeights(batch_normalization_42_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_beta_path =  dir_prefix + std::string("batch_normalization_42_beta.bin"); 
-void* batch_normalization_42_beta =  readTrainedWeights(batch_normalization_42_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_mean_path =  dir_prefix + std::string("batch_normalization_42_mean.bin"); 
-void* batch_normalization_42_mean =  readTrainedWeights(batch_normalization_42_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_variance_path =  dir_prefix + std::string("batch_normalization_42_variance.bin"); 
-void* batch_normalization_42_variance =  readTrainedWeights(batch_normalization_42_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_15_w_path =  dir_prefix + std::string("depthwise_conv2d_15_w.bin"); 
-void* depthwise_conv2d_15_w =  readTrainedWeights(depthwise_conv2d_15_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_43_gamma_path =  dir_prefix + std::string("batch_normalization_43_gamma.bin"); 
-void* batch_normalization_43_gamma =  readTrainedWeights(batch_normalization_43_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_beta_path =  dir_prefix + std::string("batch_normalization_43_beta.bin"); 
-void* batch_normalization_43_beta =  readTrainedWeights(batch_normalization_43_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_mean_path =  dir_prefix + std::string("batch_normalization_43_mean.bin"); 
-void* batch_normalization_43_mean =  readTrainedWeights(batch_normalization_43_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_variance_path =  dir_prefix + std::string("batch_normalization_43_variance.bin"); 
-void* batch_normalization_43_variance =  readTrainedWeights(batch_normalization_43_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_30_w_path =  dir_prefix + std::string("conv2d_30_w.bin"); 
-void* conv2d_30_w =  readTrainedWeights(conv2d_30_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_44_gamma_path =  dir_prefix + std::string("batch_normalization_44_gamma.bin"); 
-void* batch_normalization_44_gamma =  readTrainedWeights(batch_normalization_44_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_beta_path =  dir_prefix + std::string("batch_normalization_44_beta.bin"); 
-void* batch_normalization_44_beta =  readTrainedWeights(batch_normalization_44_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_mean_path =  dir_prefix + std::string("batch_normalization_44_mean.bin"); 
-void* batch_normalization_44_mean =  readTrainedWeights(batch_normalization_44_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_variance_path =  dir_prefix + std::string("batch_normalization_44_variance.bin"); 
-void* batch_normalization_44_variance =  readTrainedWeights(batch_normalization_44_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_31_w_path =  dir_prefix + std::string("conv2d_31_w.bin"); 
-void* conv2d_31_w =  readTrainedWeights(conv2d_31_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_45_gamma_path =  dir_prefix + std::string("batch_normalization_45_gamma.bin"); 
-void* batch_normalization_45_gamma =  readTrainedWeights(batch_normalization_45_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_beta_path =  dir_prefix + std::string("batch_normalization_45_beta.bin"); 
-void* batch_normalization_45_beta =  readTrainedWeights(batch_normalization_45_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_mean_path =  dir_prefix + std::string("batch_normalization_45_mean.bin"); 
-void* batch_normalization_45_mean =  readTrainedWeights(batch_normalization_45_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_variance_path =  dir_prefix + std::string("batch_normalization_45_variance.bin"); 
-void* batch_normalization_45_variance =  readTrainedWeights(batch_normalization_45_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_16_w_path =  dir_prefix + std::string("depthwise_conv2d_16_w.bin"); 
-void* depthwise_conv2d_16_w =  readTrainedWeights(depthwise_conv2d_16_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_46_gamma_path =  dir_prefix + std::string("batch_normalization_46_gamma.bin"); 
-void* batch_normalization_46_gamma =  readTrainedWeights(batch_normalization_46_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_beta_path =  dir_prefix + std::string("batch_normalization_46_beta.bin"); 
-void* batch_normalization_46_beta =  readTrainedWeights(batch_normalization_46_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_mean_path =  dir_prefix + std::string("batch_normalization_46_mean.bin"); 
-void* batch_normalization_46_mean =  readTrainedWeights(batch_normalization_46_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_variance_path =  dir_prefix + std::string("batch_normalization_46_variance.bin"); 
-void* batch_normalization_46_variance =  readTrainedWeights(batch_normalization_46_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_32_w_path =  dir_prefix + std::string("conv2d_32_w.bin"); 
-void* conv2d_32_w =  readTrainedWeights(conv2d_32_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_47_gamma_path =  dir_prefix + std::string("batch_normalization_47_gamma.bin"); 
-void* batch_normalization_47_gamma =  readTrainedWeights(batch_normalization_47_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_beta_path =  dir_prefix + std::string("batch_normalization_47_beta.bin"); 
-void* batch_normalization_47_beta =  readTrainedWeights(batch_normalization_47_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_mean_path =  dir_prefix + std::string("batch_normalization_47_mean.bin"); 
-void* batch_normalization_47_mean =  readTrainedWeights(batch_normalization_47_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_variance_path =  dir_prefix + std::string("batch_normalization_47_variance.bin"); 
-void* batch_normalization_47_variance =  readTrainedWeights(batch_normalization_47_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_33_w_path =  dir_prefix + std::string("conv2d_33_w.bin"); 
-void* conv2d_33_w =  readTrainedWeights(conv2d_33_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_48_gamma_path =  dir_prefix + std::string("batch_normalization_48_gamma.bin"); 
-void* batch_normalization_48_gamma =  readTrainedWeights(batch_normalization_48_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_beta_path =  dir_prefix + std::string("batch_normalization_48_beta.bin"); 
-void* batch_normalization_48_beta =  readTrainedWeights(batch_normalization_48_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_mean_path =  dir_prefix + std::string("batch_normalization_48_mean.bin"); 
-void* batch_normalization_48_mean =  readTrainedWeights(batch_normalization_48_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_variance_path =  dir_prefix + std::string("batch_normalization_48_variance.bin"); 
-void* batch_normalization_48_variance =  readTrainedWeights(batch_normalization_48_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_17_w_path =  dir_prefix + std::string("depthwise_conv2d_17_w.bin"); 
-void* depthwise_conv2d_17_w =  readTrainedWeights(depthwise_conv2d_17_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_49_gamma_path =  dir_prefix + std::string("batch_normalization_49_gamma.bin"); 
-void* batch_normalization_49_gamma =  readTrainedWeights(batch_normalization_49_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_beta_path =  dir_prefix + std::string("batch_normalization_49_beta.bin"); 
-void* batch_normalization_49_beta =  readTrainedWeights(batch_normalization_49_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_mean_path =  dir_prefix + std::string("batch_normalization_49_mean.bin"); 
-void* batch_normalization_49_mean =  readTrainedWeights(batch_normalization_49_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_variance_path =  dir_prefix + std::string("batch_normalization_49_variance.bin"); 
-void* batch_normalization_49_variance =  readTrainedWeights(batch_normalization_49_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_34_w_path =  dir_prefix + std::string("conv2d_34_w.bin"); 
-void* conv2d_34_w =  readTrainedWeights(conv2d_34_w_path.c_str(), 0,320,960,1,1); 
-std::string batch_normalization_50_gamma_path =  dir_prefix + std::string("batch_normalization_50_gamma.bin"); 
-void* batch_normalization_50_gamma =  readTrainedWeights(batch_normalization_50_gamma_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_beta_path =  dir_prefix + std::string("batch_normalization_50_beta.bin"); 
-void* batch_normalization_50_beta =  readTrainedWeights(batch_normalization_50_beta_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_mean_path =  dir_prefix + std::string("batch_normalization_50_mean.bin"); 
-void* batch_normalization_50_mean =  readTrainedWeights(batch_normalization_50_mean_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_variance_path =  dir_prefix + std::string("batch_normalization_50_variance.bin"); 
-void* batch_normalization_50_variance =  readTrainedWeights(batch_normalization_50_variance_path.c_str(), 0,1,320,1,1); 
-std::string conv2d_35_w_path =  dir_prefix + std::string("conv2d_35_w.bin"); 
-void* conv2d_35_w =  readTrainedWeights(conv2d_35_w_path.c_str(), 0,1280,320,1,1); 
-std::string batch_normalization_51_gamma_path =  dir_prefix + std::string("batch_normalization_51_gamma.bin"); 
-void* batch_normalization_51_gamma =  readTrainedWeights(batch_normalization_51_gamma_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_beta_path =  dir_prefix + std::string("batch_normalization_51_beta.bin"); 
-void* batch_normalization_51_beta =  readTrainedWeights(batch_normalization_51_beta_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_mean_path =  dir_prefix + std::string("batch_normalization_51_mean.bin"); 
-void* batch_normalization_51_mean =  readTrainedWeights(batch_normalization_51_mean_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_variance_path =  dir_prefix + std::string("batch_normalization_51_variance.bin"); 
-void* batch_normalization_51_variance =  readTrainedWeights(batch_normalization_51_variance_path.c_str(), 0,1,1280,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,5120,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
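-// Select the [start, end) input range for batch i; the batch loop, i, and
-// batch_size are defined earlier in this generated file.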
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
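-// The calls below replay the MobileNetV2 inference graph layer by layer.
-// Each ConvLayer_PROMISE call fuses a convolution with calibrated min/max
-// ranges for its input, weights, and output (the paired floating-point
-// arguments); the trailing 9 appears to be the precision knob for the
-// PROMISE approximate-execution backend. tensorConvolution calls whose last
-// argument equals the channel count implement the depthwise convolutions.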
-void* var_0 = ConvLayer_PROMISE(input, -1.9105923, 2.145039, conv2d_1_w, -1.6180872917175293, 1.117160677909851, NULL, 0, 0, 1, 1, 1, 1, -1, 0, -1, -30.515915058135988, 30.680313323974644, 9); 
-void* var_1 = tensorConvolution(var_0, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_2 = tensorBatchNorm(var_1, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_3 = tensorRelu(var_2); 
-void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.59976006364824, conv2d_2_w, -1.1370596212148665, 1.3073962382078181, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -14.505918518066407, 13.629034059524539, 9); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_6 = ConvLayer_PROMISE(var_5, -4.309355854988098, 3.900550650119789, conv2d_3_w, -1.6225290149450302, 1.2627512609958669, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -18.078887981414795, 15.030233385086063, 9); 
-void* var_7 = tensorBatchNorm(var_6, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = tensorConvolution(var_8, depthwise_conv2d_2_w, 1, 1, 1, 1, 1, 96); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.8726992659569675, conv2d_4_w, -0.7004256679415704, 0.7406479438543322, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -12.122967027664185, 12.041194002151496, 9); 
-void* var_13 = tensorBatchNorm(var_12, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_14 = ConvLayer_PROMISE(var_13, -5.584836505889893, 5.1076841955185035, conv2d_5_w, -0.5288015562295914, 0.6439660099148974, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -7.6197896194458, 9.064867986679104, 9); 
-void* var_15 = tensorBatchNorm(var_14, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_16 = tensorRelu(var_15); 
-void* var_17 = tensorConvolution(var_16, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 144); 
-void* var_18 = tensorBatchNorm(var_17, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_19 = tensorRelu(var_18); 
-void* var_20 = ConvLayer_PROMISE(var_19, 0.0, 5.017239574909283, conv2d_6_w, -0.41820720136165623, 0.5334677529335075, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -14.035128602981567, 15.852058460235604, 9); 
-void* var_21 = tensorBatchNorm(var_20, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_22 = tensorAdd(var_13, var_21); 
-void* var_23 = ConvLayer_PROMISE(var_22, -6.516137770652771, 6.882242226600651, conv2d_7_w, -0.6695078402757645, 0.7230790755152725, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -12.456909196853637, 13.537109403610287, 9); 
-void* var_24 = tensorBatchNorm(var_23, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_25 = tensorRelu(var_24); 
-void* var_26 = tensorConvolution(var_25, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 144); 
-void* var_27 = tensorBatchNorm(var_26, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_28 = tensorRelu(var_27); 
-void* var_29 = ConvLayer_PROMISE(var_28, 0.0, 4.6885352153778115, conv2d_8_w, -0.515219985961914, 0.4966081013679511, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.365011343002319, 13.858240459442193, 9); 
-void* var_30 = tensorBatchNorm(var_29, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_31 = ConvLayer_PROMISE(var_30, -5.659313384056091, 5.353823287963884, conv2d_9_w, -0.38776333206892016, 0.4313740559220367, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -7.494747163772583, 9.150955280303819, 9); 
-void* var_32 = tensorBatchNorm(var_31, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_33 = tensorRelu(var_32); 
-void* var_34 = tensorConvolution(var_33, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 192); 
-void* var_35 = tensorBatchNorm(var_34, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_36 = tensorRelu(var_35); 
-void* var_37 = ConvLayer_PROMISE(var_36, 0.0, 5.240342163562822, conv2d_10_w, -0.3396388011574745, 0.3658320212364196, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -13.428524814605714, 14.668315940856942, 9); 
-void* var_38 = tensorBatchNorm(var_37, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_39 = tensorAdd(var_30, var_38); 
-void* var_40 = ConvLayer_PROMISE(var_39, -7.257712150573731, 7.189542776107789, conv2d_11_w, -0.4041371369659901, 0.34401592910289736, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.049096151351927, 9.779580131530736, 9); 
-void* var_41 = tensorBatchNorm(var_40, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_42 = tensorRelu(var_41); 
-void* var_43 = tensorConvolution(var_42, depthwise_conv2d_6_w, 1, 1, 1, 1, 1, 192); 
-void* var_44 = tensorBatchNorm(var_43, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_45 = tensorRelu(var_44); 
-void* var_46 = ConvLayer_PROMISE(var_45, 0.0, 5.171829322815142, conv2d_12_w, -0.3318945516943932, 0.3025509023666382, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -10.416078590393067, 8.875564914703425, 9); 
-void* var_47 = tensorBatchNorm(var_46, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_48 = tensorAdd(var_39, var_47); 
-void* var_49 = ConvLayer_PROMISE(var_48, -7.8144073562622065, 7.714953693390015, conv2d_13_w, -0.4585379458963871, 0.48376197892427397, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -11.66812041282654, 13.606535158157008, 9); 
-void* var_50 = tensorBatchNorm(var_49, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_51 = tensorRelu(var_50); 
-void* var_52 = tensorConvolution(var_51, depthwise_conv2d_7_w, 1, 1, 2, 2, 1, 192); 
-void* var_53 = tensorBatchNorm(var_52, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_54 = tensorRelu(var_53); 
-void* var_55 = ConvLayer_PROMISE(var_54, 0.0, 4.76878218793879, conv2d_14_w, -0.3252002420425415, 0.33277199989557193, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.83636313343048, 5.8209967956542945, 9); 
-void* var_56 = tensorBatchNorm(var_55, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_57 = ConvLayer_PROMISE(var_56, -4.2116189937591555, 3.667763746261561, conv2d_15_w, -0.21767465770244598, 0.20771052688360275, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.7635746021270755, 4.714454175949097, 9); 
-void* var_58 = tensorBatchNorm(var_57, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_59 = tensorRelu(var_58); 
-void* var_60 = tensorConvolution(var_59, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 384); 
-void* var_61 = tensorBatchNorm(var_60, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_62 = tensorRelu(var_61); 
-void* var_63 = ConvLayer_PROMISE(var_62, 0.0, 4.830589411258934, conv2d_16_w, -0.20011539459228517, 0.18881031423807182, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.757593467712402, 9.28659096336362, 9); 
-void* var_64 = tensorBatchNorm(var_63, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_65 = tensorAdd(var_56, var_64); 
-void* var_66 = ConvLayer_PROMISE(var_65, -6.0708443632125855, 5.517500228881749, conv2d_17_w, -0.19379180744290353, 0.202173922583461, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.838886291503906, 6.482348596572901, 9); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_69 = tensorConvolution(var_68, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 384); 
-void* var_70 = tensorBatchNorm(var_69, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_71 = tensorRelu(var_70); 
-void* var_72 = ConvLayer_PROMISE(var_71, 0.0, 5.407182216644287, conv2d_18_w, -0.1867049880325794, 0.17993023470044128, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -12.412525278091431, 12.208400741577147, 9); 
-void* var_73 = tensorBatchNorm(var_72, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_74 = tensorAdd(var_65, var_73); 
-void* var_75 = ConvLayer_PROMISE(var_74, -7.759848834037781, 7.189491007804856, conv2d_19_w, -0.18295864015817642, 0.1774685539305213, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.372270188331604, 8.098179874420175, 9); 
-void* var_76 = tensorBatchNorm(var_75, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_77 = tensorRelu(var_76); 
-void* var_78 = tensorConvolution(var_77, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 384); 
-void* var_79 = tensorBatchNorm(var_78, batch_normalization_28_gamma, batch_normalization_28_beta, batch_normalization_28_mean, batch_normalization_28_variance, 0.001); 
-void* var_80 = tensorRelu(var_79); 
-void* var_81 = ConvLayer_PROMISE(var_80, 0.0, 5.629057416915913, conv2d_20_w, -0.16942347586154938, 0.17993337959051173, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -7.798038047790526, 8.919698917388772, 9); 
-void* var_82 = tensorBatchNorm(var_81, batch_normalization_29_gamma, batch_normalization_29_beta, batch_normalization_29_mean, batch_normalization_29_variance, 0.001); 
-void* var_83 = tensorAdd(var_74, var_82); 
-void* var_84 = ConvLayer_PROMISE(var_83, -9.430036806106568, 8.332214206695502, conv2d_21_w, -0.24210206493735315, 0.2529735609889023, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -11.151032608032226, 14.833457197189365, 9); 
-void* var_85 = tensorBatchNorm(var_84, batch_normalization_30_gamma, batch_normalization_30_beta, batch_normalization_30_mean, batch_normalization_30_variance, 0.001); 
-void* var_86 = tensorRelu(var_85); 
-void* var_87 = tensorConvolution(var_86, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 384); 
-void* var_88 = tensorBatchNorm(var_87, batch_normalization_31_gamma, batch_normalization_31_beta, batch_normalization_31_mean, batch_normalization_31_variance, 0.001); 
-void* var_89 = tensorRelu(var_88); 
-void* var_90 = ConvLayer_PROMISE(var_89, 0.0, 5.798735237121775, conv2d_22_w, -0.1911477698981762, 0.19145021069049817, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.568199612617493, 9.707792984008904, 9); 
-void* var_91 = tensorBatchNorm(var_90, batch_normalization_32_gamma, batch_normalization_32_beta, batch_normalization_32_mean, batch_normalization_32_variance, 0.001); 
-void* var_92 = ConvLayer_PROMISE(var_91, -3.6258800530433657, 4.978443423271225, conv2d_23_w, -0.14561383947730064, 0.14606056764721925, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.058748731613159, 5.207366097450073, 9); 
-void* var_93 = tensorBatchNorm(var_92, batch_normalization_33_gamma, batch_normalization_33_beta, batch_normalization_33_mean, batch_normalization_33_variance, 0.001); 
-void* var_94 = tensorRelu(var_93); 
-void* var_95 = tensorConvolution(var_94, depthwise_conv2d_12_w, 1, 1, 1, 1, 1, 576); 
-void* var_96 = tensorBatchNorm(var_95, batch_normalization_34_gamma, batch_normalization_34_beta, batch_normalization_34_mean, batch_normalization_34_variance, 0.001); 
-void* var_97 = tensorRelu(var_96); 
-void* var_98 = ConvLayer_PROMISE(var_97, 0.0, 6.51658540868771, conv2d_24_w, -0.14010273113846777, 0.133546221256256, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -16.38492376708984, 12.431352186203089, 9); 
-void* var_99 = tensorBatchNorm(var_98, batch_normalization_35_gamma, batch_normalization_35_beta, batch_normalization_35_mean, batch_normalization_35_variance, 0.001); 
-void* var_100 = tensorAdd(var_91, var_99); 
-void* var_101 = ConvLayer_PROMISE(var_100, -6.121079467773438, 6.870752349853518, conv2d_25_w, -0.13994703620672228, 0.14077512532472705, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.201230587005615, 7.182718342780955, 9); 
-void* var_102 = tensorBatchNorm(var_101, batch_normalization_36_gamma, batch_normalization_36_beta, batch_normalization_36_mean, batch_normalization_36_variance, 0.001); 
-void* var_103 = tensorRelu(var_102); 
-void* var_104 = tensorConvolution(var_103, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 576); 
-void* var_105 = tensorBatchNorm(var_104, batch_normalization_37_gamma, batch_normalization_37_beta, batch_normalization_37_mean, batch_normalization_37_variance, 0.001); 
-void* var_106 = tensorRelu(var_105); 
-void* var_107 = ConvLayer_PROMISE(var_106, 0.0, 6.78510052871718, conv2d_26_w, -0.1351969686150551, 0.12828950628638264, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -12.989627714157105, 12.089402362823513, 9); 
-void* var_108 = tensorBatchNorm(var_107, batch_normalization_38_gamma, batch_normalization_38_beta, batch_normalization_38_mean, batch_normalization_38_variance, 0.001); 
-void* var_109 = tensorAdd(var_100, var_108); 
-void* var_110 = ConvLayer_PROMISE(var_109, -7.789269973754883, 7.7248824281693, conv2d_27_w, -0.1802013835310936, 0.17725828483700806, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -8.86290655517578, 11.057712678909091, 9); 
-void* var_111 = tensorBatchNorm(var_110, batch_normalization_39_gamma, batch_normalization_39_beta, batch_normalization_39_mean, batch_normalization_39_variance, 0.001); 
-void* var_112 = tensorRelu(var_111); 
-void* var_113 = tensorConvolution(var_112, depthwise_conv2d_14_w, 1, 1, 2, 2, 1, 576); 
-void* var_114 = tensorBatchNorm(var_113, batch_normalization_40_gamma, batch_normalization_40_beta, batch_normalization_40_mean, batch_normalization_40_variance, 0.001); 
-void* var_115 = tensorRelu(var_114); 
-void* var_116 = ConvLayer_PROMISE(var_115, 0.0, 7.198854037761778, conv2d_28_w, -0.1413962979018688, 0.13931855550408365, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -9.692179286956787, 7.801771405220015, 9); 
-void* var_117 = tensorBatchNorm(var_116, batch_normalization_41_gamma, batch_normalization_41_beta, batch_normalization_41_mean, batch_normalization_41_variance, 0.001); 
-void* var_118 = ConvLayer_PROMISE(var_117, -5.154439496040343, 4.208310750007554, conv2d_29_w, -0.10713684476912022, 0.10857602393627158, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.840223340034485, 3.906086678028145, 9); 
-void* var_119 = tensorBatchNorm(var_118, batch_normalization_42_gamma, batch_normalization_42_beta, batch_normalization_42_mean, batch_normalization_42_variance, 0.001); 
-void* var_120 = tensorRelu(var_119); 
-void* var_121 = tensorConvolution(var_120, depthwise_conv2d_15_w, 1, 1, 1, 1, 1, 960); 
-void* var_122 = tensorBatchNorm(var_121, batch_normalization_43_gamma, batch_normalization_43_beta, batch_normalization_43_mean, batch_normalization_43_variance, 0.001); 
-void* var_123 = tensorRelu(var_122); 
-void* var_124 = ConvLayer_PROMISE(var_123, 0.0, 5.625304239749944, conv2d_30_w, -0.10913688711822034, 0.10753180256485936, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -7.80785062122345, 9.989215379714604, 9); 
-void* var_125 = tensorBatchNorm(var_124, batch_normalization_44_gamma, batch_normalization_44_beta, batch_normalization_44_mean, batch_normalization_44_variance, 0.001); 
-void* var_126 = tensorAdd(var_117, var_125); 
-void* var_127 = ConvLayer_PROMISE(var_126, -4.683857499122619, 4.920808605194038, conv2d_31_w, -0.11377229495346546, 0.10955536790192154, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -4.129921095848084, 3.6074319915773216, 9); 
-void* var_128 = tensorBatchNorm(var_127, batch_normalization_45_gamma, batch_normalization_45_beta, batch_normalization_45_mean, batch_normalization_45_variance, 0.001); 
-void* var_129 = tensorRelu(var_128); 
-void* var_130 = tensorConvolution(var_129, depthwise_conv2d_16_w, 1, 1, 1, 1, 1, 960); 
-void* var_131 = tensorBatchNorm(var_130, batch_normalization_46_gamma, batch_normalization_46_beta, batch_normalization_46_mean, batch_normalization_46_variance, 0.001); 
-void* var_132 = tensorRelu(var_131); 
-void* var_133 = ConvLayer_PROMISE(var_132, 0.0, 4.8969989051828975, conv2d_32_w, -0.10555544766783714, 0.10780695463716974, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -2.947932963848114, 3.134355350017495, 9); 
-void* var_134 = tensorBatchNorm(var_133, batch_normalization_47_gamma, batch_normalization_47_beta, batch_normalization_47_mean, batch_normalization_47_variance, 0.001); 
-void* var_135 = tensorAdd(var_126, var_134); 
-void* var_136 = ConvLayer_PROMISE(var_135, -5.496015277862549, 5.680448228836048, conv2d_33_w, -0.11888585649430752, 0.11954810336232179, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -6.086169843673707, 5.468563261032134, 9); 
-void* var_137 = tensorBatchNorm(var_136, batch_normalization_48_gamma, batch_normalization_48_beta, batch_normalization_48_mean, batch_normalization_48_variance, 0.001); 
-void* var_138 = tensorRelu(var_137); 
-void* var_139 = tensorConvolution(var_138, depthwise_conv2d_17_w, 1, 1, 1, 1, 1, 960); 
-void* var_140 = tensorBatchNorm(var_139, batch_normalization_49_gamma, batch_normalization_49_beta, batch_normalization_49_mean, batch_normalization_49_variance, 0.001); 
-void* var_141 = tensorRelu(var_140); 
-void* var_142 = ConvLayer_PROMISE(var_141, 0.0, 6.855439195632954, conv2d_34_w, -0.09301538287103175, 0.09316299571096909, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -3.198445457458496, 3.939841930866389, 9); 
-void* var_143 = tensorBatchNorm(var_142, batch_normalization_50_gamma, batch_normalization_50_beta, batch_normalization_50_mean, batch_normalization_50_variance, 0.001); 
-void* var_144 = ConvLayer_PROMISE(var_143, -2.4313668818473815, 2.9796178574562893, conv2d_35_w, -0.07986876694858074, 0.07952085809409648, NULL, 0, 0, 0, 0, 1, 1, -1, 0, -1, -13.93901468849182, 1.7606397964954392, 9); 
-void* var_145 = tensorBatchNorm(var_144, batch_normalization_51_gamma, batch_normalization_51_beta, batch_normalization_51_mean, batch_normalization_51_variance, 0.001); 
-void* var_146 = tensorRelu(var_145); 
-void* var_147 = tensorPooling(var_146,1,2,2,0,0,2,2); 
-void* var_148 = FCLayer_PROMISE(var_147, 0.0, 0.8575248373746913, dense_1_w, -0.12000246234238149, 0.12751513716578475, dense_1_b, -0.15739505, 0.23266713, -1, -9.031755617141723, 13.669795604705882, 9); 
-void* var_149 = tensorSoftmax(var_148); 
-
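-// Score this batch against its ground-truth labels, accumulate the running
-// accuracy, and release the batch's device memory.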
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_149); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
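-// Average the per-batch accuracies and record the result for this run.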
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/src.cc
deleted file mode 100644
index 47fdea27a8376b9e68365c674ee09419c763c651..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/mobilenetv2_quant/src.cc
+++ /dev/null
@@ -1,721 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
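-// Load the trained MobileNetV2 weights: each tensor comes from a .bin file
-// read with readTrainedWeights(path, 0, dim1, dim2, dim3, dim4); the leading
-// 0 appears to select the runtime's default float format.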
-std::string dir_prefix = std::string("data/mobilenetv2_quant/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,32,3,3,3); 
-std::string depthwise_conv2d_1_w_path =  dir_prefix + std::string("depthwise_conv2d_1_w.bin"); 
-void* depthwise_conv2d_1_w =  readTrainedWeights(depthwise_conv2d_1_w_path.c_str(), 0,32,1,3,3); 
-std::string batch_normalization_1_gamma_path =  dir_prefix + std::string("batch_normalization_1_gamma.bin"); 
-void* batch_normalization_1_gamma =  readTrainedWeights(batch_normalization_1_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_beta_path =  dir_prefix + std::string("batch_normalization_1_beta.bin"); 
-void* batch_normalization_1_beta =  readTrainedWeights(batch_normalization_1_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_mean_path =  dir_prefix + std::string("batch_normalization_1_mean.bin"); 
-void* batch_normalization_1_mean =  readTrainedWeights(batch_normalization_1_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_1_variance_path =  dir_prefix + std::string("batch_normalization_1_variance.bin"); 
-void* batch_normalization_1_variance =  readTrainedWeights(batch_normalization_1_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,32,1,1); 
-std::string batch_normalization_2_gamma_path =  dir_prefix + std::string("batch_normalization_2_gamma.bin"); 
-void* batch_normalization_2_gamma =  readTrainedWeights(batch_normalization_2_gamma_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_beta_path =  dir_prefix + std::string("batch_normalization_2_beta.bin"); 
-void* batch_normalization_2_beta =  readTrainedWeights(batch_normalization_2_beta_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_mean_path =  dir_prefix + std::string("batch_normalization_2_mean.bin"); 
-void* batch_normalization_2_mean =  readTrainedWeights(batch_normalization_2_mean_path.c_str(), 0,1,16,1,1); 
-std::string batch_normalization_2_variance_path =  dir_prefix + std::string("batch_normalization_2_variance.bin"); 
-void* batch_normalization_2_variance =  readTrainedWeights(batch_normalization_2_variance_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,96,16,1,1); 
-std::string batch_normalization_3_gamma_path =  dir_prefix + std::string("batch_normalization_3_gamma.bin"); 
-void* batch_normalization_3_gamma =  readTrainedWeights(batch_normalization_3_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_beta_path =  dir_prefix + std::string("batch_normalization_3_beta.bin"); 
-void* batch_normalization_3_beta =  readTrainedWeights(batch_normalization_3_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_mean_path =  dir_prefix + std::string("batch_normalization_3_mean.bin"); 
-void* batch_normalization_3_mean =  readTrainedWeights(batch_normalization_3_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_3_variance_path =  dir_prefix + std::string("batch_normalization_3_variance.bin"); 
-void* batch_normalization_3_variance =  readTrainedWeights(batch_normalization_3_variance_path.c_str(), 0,1,96,1,1); 
-std::string depthwise_conv2d_2_w_path =  dir_prefix + std::string("depthwise_conv2d_2_w.bin"); 
-void* depthwise_conv2d_2_w =  readTrainedWeights(depthwise_conv2d_2_w_path.c_str(), 0,96,1,3,3); 
-std::string batch_normalization_4_gamma_path =  dir_prefix + std::string("batch_normalization_4_gamma.bin"); 
-void* batch_normalization_4_gamma =  readTrainedWeights(batch_normalization_4_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_beta_path =  dir_prefix + std::string("batch_normalization_4_beta.bin"); 
-void* batch_normalization_4_beta =  readTrainedWeights(batch_normalization_4_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_mean_path =  dir_prefix + std::string("batch_normalization_4_mean.bin"); 
-void* batch_normalization_4_mean =  readTrainedWeights(batch_normalization_4_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_4_variance_path =  dir_prefix + std::string("batch_normalization_4_variance.bin"); 
-void* batch_normalization_4_variance =  readTrainedWeights(batch_normalization_4_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,24,96,1,1); 
-std::string batch_normalization_5_gamma_path =  dir_prefix + std::string("batch_normalization_5_gamma.bin"); 
-void* batch_normalization_5_gamma =  readTrainedWeights(batch_normalization_5_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_beta_path =  dir_prefix + std::string("batch_normalization_5_beta.bin"); 
-void* batch_normalization_5_beta =  readTrainedWeights(batch_normalization_5_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_mean_path =  dir_prefix + std::string("batch_normalization_5_mean.bin"); 
-void* batch_normalization_5_mean =  readTrainedWeights(batch_normalization_5_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_5_variance_path =  dir_prefix + std::string("batch_normalization_5_variance.bin"); 
-void* batch_normalization_5_variance =  readTrainedWeights(batch_normalization_5_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_6_gamma_path =  dir_prefix + std::string("batch_normalization_6_gamma.bin"); 
-void* batch_normalization_6_gamma =  readTrainedWeights(batch_normalization_6_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_beta_path =  dir_prefix + std::string("batch_normalization_6_beta.bin"); 
-void* batch_normalization_6_beta =  readTrainedWeights(batch_normalization_6_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_mean_path =  dir_prefix + std::string("batch_normalization_6_mean.bin"); 
-void* batch_normalization_6_mean =  readTrainedWeights(batch_normalization_6_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_6_variance_path =  dir_prefix + std::string("batch_normalization_6_variance.bin"); 
-void* batch_normalization_6_variance =  readTrainedWeights(batch_normalization_6_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_3_w_path =  dir_prefix + std::string("depthwise_conv2d_3_w.bin"); 
-void* depthwise_conv2d_3_w =  readTrainedWeights(depthwise_conv2d_3_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_7_gamma_path =  dir_prefix + std::string("batch_normalization_7_gamma.bin"); 
-void* batch_normalization_7_gamma =  readTrainedWeights(batch_normalization_7_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_beta_path =  dir_prefix + std::string("batch_normalization_7_beta.bin"); 
-void* batch_normalization_7_beta =  readTrainedWeights(batch_normalization_7_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_mean_path =  dir_prefix + std::string("batch_normalization_7_mean.bin"); 
-void* batch_normalization_7_mean =  readTrainedWeights(batch_normalization_7_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_7_variance_path =  dir_prefix + std::string("batch_normalization_7_variance.bin"); 
-void* batch_normalization_7_variance =  readTrainedWeights(batch_normalization_7_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,24,144,1,1); 
-std::string batch_normalization_8_gamma_path =  dir_prefix + std::string("batch_normalization_8_gamma.bin"); 
-void* batch_normalization_8_gamma =  readTrainedWeights(batch_normalization_8_gamma_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_beta_path =  dir_prefix + std::string("batch_normalization_8_beta.bin"); 
-void* batch_normalization_8_beta =  readTrainedWeights(batch_normalization_8_beta_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_mean_path =  dir_prefix + std::string("batch_normalization_8_mean.bin"); 
-void* batch_normalization_8_mean =  readTrainedWeights(batch_normalization_8_mean_path.c_str(), 0,1,24,1,1); 
-std::string batch_normalization_8_variance_path =  dir_prefix + std::string("batch_normalization_8_variance.bin"); 
-void* batch_normalization_8_variance =  readTrainedWeights(batch_normalization_8_variance_path.c_str(), 0,1,24,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,144,24,1,1); 
-std::string batch_normalization_9_gamma_path =  dir_prefix + std::string("batch_normalization_9_gamma.bin"); 
-void* batch_normalization_9_gamma =  readTrainedWeights(batch_normalization_9_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_beta_path =  dir_prefix + std::string("batch_normalization_9_beta.bin"); 
-void* batch_normalization_9_beta =  readTrainedWeights(batch_normalization_9_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_mean_path =  dir_prefix + std::string("batch_normalization_9_mean.bin"); 
-void* batch_normalization_9_mean =  readTrainedWeights(batch_normalization_9_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_9_variance_path =  dir_prefix + std::string("batch_normalization_9_variance.bin"); 
-void* batch_normalization_9_variance =  readTrainedWeights(batch_normalization_9_variance_path.c_str(), 0,1,144,1,1); 
-std::string depthwise_conv2d_4_w_path =  dir_prefix + std::string("depthwise_conv2d_4_w.bin"); 
-void* depthwise_conv2d_4_w =  readTrainedWeights(depthwise_conv2d_4_w_path.c_str(), 0,144,1,3,3); 
-std::string batch_normalization_10_gamma_path =  dir_prefix + std::string("batch_normalization_10_gamma.bin"); 
-void* batch_normalization_10_gamma =  readTrainedWeights(batch_normalization_10_gamma_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_beta_path =  dir_prefix + std::string("batch_normalization_10_beta.bin"); 
-void* batch_normalization_10_beta =  readTrainedWeights(batch_normalization_10_beta_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_mean_path =  dir_prefix + std::string("batch_normalization_10_mean.bin"); 
-void* batch_normalization_10_mean =  readTrainedWeights(batch_normalization_10_mean_path.c_str(), 0,1,144,1,1); 
-std::string batch_normalization_10_variance_path =  dir_prefix + std::string("batch_normalization_10_variance.bin"); 
-void* batch_normalization_10_variance =  readTrainedWeights(batch_normalization_10_variance_path.c_str(), 0,1,144,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,144,1,1); 
-std::string batch_normalization_11_gamma_path =  dir_prefix + std::string("batch_normalization_11_gamma.bin"); 
-void* batch_normalization_11_gamma =  readTrainedWeights(batch_normalization_11_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_beta_path =  dir_prefix + std::string("batch_normalization_11_beta.bin"); 
-void* batch_normalization_11_beta =  readTrainedWeights(batch_normalization_11_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_mean_path =  dir_prefix + std::string("batch_normalization_11_mean.bin"); 
-void* batch_normalization_11_mean =  readTrainedWeights(batch_normalization_11_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_11_variance_path =  dir_prefix + std::string("batch_normalization_11_variance.bin"); 
-void* batch_normalization_11_variance =  readTrainedWeights(batch_normalization_11_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_12_gamma_path =  dir_prefix + std::string("batch_normalization_12_gamma.bin"); 
-void* batch_normalization_12_gamma =  readTrainedWeights(batch_normalization_12_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_beta_path =  dir_prefix + std::string("batch_normalization_12_beta.bin"); 
-void* batch_normalization_12_beta =  readTrainedWeights(batch_normalization_12_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_mean_path =  dir_prefix + std::string("batch_normalization_12_mean.bin"); 
-void* batch_normalization_12_mean =  readTrainedWeights(batch_normalization_12_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_12_variance_path =  dir_prefix + std::string("batch_normalization_12_variance.bin"); 
-void* batch_normalization_12_variance =  readTrainedWeights(batch_normalization_12_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_5_w_path =  dir_prefix + std::string("depthwise_conv2d_5_w.bin"); 
-void* depthwise_conv2d_5_w =  readTrainedWeights(depthwise_conv2d_5_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_13_gamma_path =  dir_prefix + std::string("batch_normalization_13_gamma.bin"); 
-void* batch_normalization_13_gamma =  readTrainedWeights(batch_normalization_13_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_beta_path =  dir_prefix + std::string("batch_normalization_13_beta.bin"); 
-void* batch_normalization_13_beta =  readTrainedWeights(batch_normalization_13_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_mean_path =  dir_prefix + std::string("batch_normalization_13_mean.bin"); 
-void* batch_normalization_13_mean =  readTrainedWeights(batch_normalization_13_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_13_variance_path =  dir_prefix + std::string("batch_normalization_13_variance.bin"); 
-void* batch_normalization_13_variance =  readTrainedWeights(batch_normalization_13_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_14_gamma_path =  dir_prefix + std::string("batch_normalization_14_gamma.bin"); 
-void* batch_normalization_14_gamma =  readTrainedWeights(batch_normalization_14_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_beta_path =  dir_prefix + std::string("batch_normalization_14_beta.bin"); 
-void* batch_normalization_14_beta =  readTrainedWeights(batch_normalization_14_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_mean_path =  dir_prefix + std::string("batch_normalization_14_mean.bin"); 
-void* batch_normalization_14_mean =  readTrainedWeights(batch_normalization_14_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_14_variance_path =  dir_prefix + std::string("batch_normalization_14_variance.bin"); 
-void* batch_normalization_14_variance =  readTrainedWeights(batch_normalization_14_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_15_gamma_path =  dir_prefix + std::string("batch_normalization_15_gamma.bin"); 
-void* batch_normalization_15_gamma =  readTrainedWeights(batch_normalization_15_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_beta_path =  dir_prefix + std::string("batch_normalization_15_beta.bin"); 
-void* batch_normalization_15_beta =  readTrainedWeights(batch_normalization_15_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_mean_path =  dir_prefix + std::string("batch_normalization_15_mean.bin"); 
-void* batch_normalization_15_mean =  readTrainedWeights(batch_normalization_15_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_15_variance_path =  dir_prefix + std::string("batch_normalization_15_variance.bin"); 
-void* batch_normalization_15_variance =  readTrainedWeights(batch_normalization_15_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_6_w_path =  dir_prefix + std::string("depthwise_conv2d_6_w.bin"); 
-void* depthwise_conv2d_6_w =  readTrainedWeights(depthwise_conv2d_6_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_16_gamma_path =  dir_prefix + std::string("batch_normalization_16_gamma.bin"); 
-void* batch_normalization_16_gamma =  readTrainedWeights(batch_normalization_16_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_beta_path =  dir_prefix + std::string("batch_normalization_16_beta.bin"); 
-void* batch_normalization_16_beta =  readTrainedWeights(batch_normalization_16_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_mean_path =  dir_prefix + std::string("batch_normalization_16_mean.bin"); 
-void* batch_normalization_16_mean =  readTrainedWeights(batch_normalization_16_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_16_variance_path =  dir_prefix + std::string("batch_normalization_16_variance.bin"); 
-void* batch_normalization_16_variance =  readTrainedWeights(batch_normalization_16_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,192,1,1); 
-std::string batch_normalization_17_gamma_path =  dir_prefix + std::string("batch_normalization_17_gamma.bin"); 
-void* batch_normalization_17_gamma =  readTrainedWeights(batch_normalization_17_gamma_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_beta_path =  dir_prefix + std::string("batch_normalization_17_beta.bin"); 
-void* batch_normalization_17_beta =  readTrainedWeights(batch_normalization_17_beta_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_mean_path =  dir_prefix + std::string("batch_normalization_17_mean.bin"); 
-void* batch_normalization_17_mean =  readTrainedWeights(batch_normalization_17_mean_path.c_str(), 0,1,32,1,1); 
-std::string batch_normalization_17_variance_path =  dir_prefix + std::string("batch_normalization_17_variance.bin"); 
-void* batch_normalization_17_variance =  readTrainedWeights(batch_normalization_17_variance_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,192,32,1,1); 
-std::string batch_normalization_18_gamma_path =  dir_prefix + std::string("batch_normalization_18_gamma.bin"); 
-void* batch_normalization_18_gamma =  readTrainedWeights(batch_normalization_18_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_beta_path =  dir_prefix + std::string("batch_normalization_18_beta.bin"); 
-void* batch_normalization_18_beta =  readTrainedWeights(batch_normalization_18_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_mean_path =  dir_prefix + std::string("batch_normalization_18_mean.bin"); 
-void* batch_normalization_18_mean =  readTrainedWeights(batch_normalization_18_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_18_variance_path =  dir_prefix + std::string("batch_normalization_18_variance.bin"); 
-void* batch_normalization_18_variance =  readTrainedWeights(batch_normalization_18_variance_path.c_str(), 0,1,192,1,1); 
-std::string depthwise_conv2d_7_w_path =  dir_prefix + std::string("depthwise_conv2d_7_w.bin"); 
-void* depthwise_conv2d_7_w =  readTrainedWeights(depthwise_conv2d_7_w_path.c_str(), 0,192,1,3,3); 
-std::string batch_normalization_19_gamma_path =  dir_prefix + std::string("batch_normalization_19_gamma.bin"); 
-void* batch_normalization_19_gamma =  readTrainedWeights(batch_normalization_19_gamma_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_beta_path =  dir_prefix + std::string("batch_normalization_19_beta.bin"); 
-void* batch_normalization_19_beta =  readTrainedWeights(batch_normalization_19_beta_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_mean_path =  dir_prefix + std::string("batch_normalization_19_mean.bin"); 
-void* batch_normalization_19_mean =  readTrainedWeights(batch_normalization_19_mean_path.c_str(), 0,1,192,1,1); 
-std::string batch_normalization_19_variance_path =  dir_prefix + std::string("batch_normalization_19_variance.bin"); 
-void* batch_normalization_19_variance =  readTrainedWeights(batch_normalization_19_variance_path.c_str(), 0,1,192,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,64,192,1,1); 
-std::string batch_normalization_20_gamma_path =  dir_prefix + std::string("batch_normalization_20_gamma.bin"); 
-void* batch_normalization_20_gamma =  readTrainedWeights(batch_normalization_20_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_beta_path =  dir_prefix + std::string("batch_normalization_20_beta.bin"); 
-void* batch_normalization_20_beta =  readTrainedWeights(batch_normalization_20_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_mean_path =  dir_prefix + std::string("batch_normalization_20_mean.bin"); 
-void* batch_normalization_20_mean =  readTrainedWeights(batch_normalization_20_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_20_variance_path =  dir_prefix + std::string("batch_normalization_20_variance.bin"); 
-void* batch_normalization_20_variance =  readTrainedWeights(batch_normalization_20_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_21_gamma_path =  dir_prefix + std::string("batch_normalization_21_gamma.bin"); 
-void* batch_normalization_21_gamma =  readTrainedWeights(batch_normalization_21_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_beta_path =  dir_prefix + std::string("batch_normalization_21_beta.bin"); 
-void* batch_normalization_21_beta =  readTrainedWeights(batch_normalization_21_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_mean_path =  dir_prefix + std::string("batch_normalization_21_mean.bin"); 
-void* batch_normalization_21_mean =  readTrainedWeights(batch_normalization_21_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_21_variance_path =  dir_prefix + std::string("batch_normalization_21_variance.bin"); 
-void* batch_normalization_21_variance =  readTrainedWeights(batch_normalization_21_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_8_w_path =  dir_prefix + std::string("depthwise_conv2d_8_w.bin"); 
-void* depthwise_conv2d_8_w =  readTrainedWeights(depthwise_conv2d_8_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_22_gamma_path =  dir_prefix + std::string("batch_normalization_22_gamma.bin"); 
-void* batch_normalization_22_gamma =  readTrainedWeights(batch_normalization_22_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_beta_path =  dir_prefix + std::string("batch_normalization_22_beta.bin"); 
-void* batch_normalization_22_beta =  readTrainedWeights(batch_normalization_22_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_mean_path =  dir_prefix + std::string("batch_normalization_22_mean.bin"); 
-void* batch_normalization_22_mean =  readTrainedWeights(batch_normalization_22_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_22_variance_path =  dir_prefix + std::string("batch_normalization_22_variance.bin"); 
-void* batch_normalization_22_variance =  readTrainedWeights(batch_normalization_22_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_23_gamma_path =  dir_prefix + std::string("batch_normalization_23_gamma.bin"); 
-void* batch_normalization_23_gamma =  readTrainedWeights(batch_normalization_23_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_beta_path =  dir_prefix + std::string("batch_normalization_23_beta.bin"); 
-void* batch_normalization_23_beta =  readTrainedWeights(batch_normalization_23_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_mean_path =  dir_prefix + std::string("batch_normalization_23_mean.bin"); 
-void* batch_normalization_23_mean =  readTrainedWeights(batch_normalization_23_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_23_variance_path =  dir_prefix + std::string("batch_normalization_23_variance.bin"); 
-void* batch_normalization_23_variance =  readTrainedWeights(batch_normalization_23_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_24_gamma_path =  dir_prefix + std::string("batch_normalization_24_gamma.bin"); 
-void* batch_normalization_24_gamma =  readTrainedWeights(batch_normalization_24_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_beta_path =  dir_prefix + std::string("batch_normalization_24_beta.bin"); 
-void* batch_normalization_24_beta =  readTrainedWeights(batch_normalization_24_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_mean_path =  dir_prefix + std::string("batch_normalization_24_mean.bin"); 
-void* batch_normalization_24_mean =  readTrainedWeights(batch_normalization_24_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_24_variance_path =  dir_prefix + std::string("batch_normalization_24_variance.bin"); 
-void* batch_normalization_24_variance =  readTrainedWeights(batch_normalization_24_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_9_w_path =  dir_prefix + std::string("depthwise_conv2d_9_w.bin"); 
-void* depthwise_conv2d_9_w =  readTrainedWeights(depthwise_conv2d_9_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_25_gamma_path =  dir_prefix + std::string("batch_normalization_25_gamma.bin"); 
-void* batch_normalization_25_gamma =  readTrainedWeights(batch_normalization_25_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_beta_path =  dir_prefix + std::string("batch_normalization_25_beta.bin"); 
-void* batch_normalization_25_beta =  readTrainedWeights(batch_normalization_25_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_mean_path =  dir_prefix + std::string("batch_normalization_25_mean.bin"); 
-void* batch_normalization_25_mean =  readTrainedWeights(batch_normalization_25_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_25_variance_path =  dir_prefix + std::string("batch_normalization_25_variance.bin"); 
-void* batch_normalization_25_variance =  readTrainedWeights(batch_normalization_25_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_26_gamma_path =  dir_prefix + std::string("batch_normalization_26_gamma.bin"); 
-void* batch_normalization_26_gamma =  readTrainedWeights(batch_normalization_26_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_beta_path =  dir_prefix + std::string("batch_normalization_26_beta.bin"); 
-void* batch_normalization_26_beta =  readTrainedWeights(batch_normalization_26_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_mean_path =  dir_prefix + std::string("batch_normalization_26_mean.bin"); 
-void* batch_normalization_26_mean =  readTrainedWeights(batch_normalization_26_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_26_variance_path =  dir_prefix + std::string("batch_normalization_26_variance.bin"); 
-void* batch_normalization_26_variance =  readTrainedWeights(batch_normalization_26_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_27_gamma_path =  dir_prefix + std::string("batch_normalization_27_gamma.bin"); 
-void* batch_normalization_27_gamma =  readTrainedWeights(batch_normalization_27_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_beta_path =  dir_prefix + std::string("batch_normalization_27_beta.bin"); 
-void* batch_normalization_27_beta =  readTrainedWeights(batch_normalization_27_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_mean_path =  dir_prefix + std::string("batch_normalization_27_mean.bin"); 
-void* batch_normalization_27_mean =  readTrainedWeights(batch_normalization_27_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_27_variance_path =  dir_prefix + std::string("batch_normalization_27_variance.bin"); 
-void* batch_normalization_27_variance =  readTrainedWeights(batch_normalization_27_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_10_w_path =  dir_prefix + std::string("depthwise_conv2d_10_w.bin"); 
-void* depthwise_conv2d_10_w =  readTrainedWeights(depthwise_conv2d_10_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_28_gamma_path =  dir_prefix + std::string("batch_normalization_28_gamma.bin"); 
-void* batch_normalization_28_gamma =  readTrainedWeights(batch_normalization_28_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_beta_path =  dir_prefix + std::string("batch_normalization_28_beta.bin"); 
-void* batch_normalization_28_beta =  readTrainedWeights(batch_normalization_28_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_mean_path =  dir_prefix + std::string("batch_normalization_28_mean.bin"); 
-void* batch_normalization_28_mean =  readTrainedWeights(batch_normalization_28_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_28_variance_path =  dir_prefix + std::string("batch_normalization_28_variance.bin"); 
-void* batch_normalization_28_variance =  readTrainedWeights(batch_normalization_28_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,384,1,1); 
-std::string batch_normalization_29_gamma_path =  dir_prefix + std::string("batch_normalization_29_gamma.bin"); 
-void* batch_normalization_29_gamma =  readTrainedWeights(batch_normalization_29_gamma_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_beta_path =  dir_prefix + std::string("batch_normalization_29_beta.bin"); 
-void* batch_normalization_29_beta =  readTrainedWeights(batch_normalization_29_beta_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_mean_path =  dir_prefix + std::string("batch_normalization_29_mean.bin"); 
-void* batch_normalization_29_mean =  readTrainedWeights(batch_normalization_29_mean_path.c_str(), 0,1,64,1,1); 
-std::string batch_normalization_29_variance_path =  dir_prefix + std::string("batch_normalization_29_variance.bin"); 
-void* batch_normalization_29_variance =  readTrainedWeights(batch_normalization_29_variance_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,384,64,1,1); 
-std::string batch_normalization_30_gamma_path =  dir_prefix + std::string("batch_normalization_30_gamma.bin"); 
-void* batch_normalization_30_gamma =  readTrainedWeights(batch_normalization_30_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_beta_path =  dir_prefix + std::string("batch_normalization_30_beta.bin"); 
-void* batch_normalization_30_beta =  readTrainedWeights(batch_normalization_30_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_mean_path =  dir_prefix + std::string("batch_normalization_30_mean.bin"); 
-void* batch_normalization_30_mean =  readTrainedWeights(batch_normalization_30_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_30_variance_path =  dir_prefix + std::string("batch_normalization_30_variance.bin"); 
-void* batch_normalization_30_variance =  readTrainedWeights(batch_normalization_30_variance_path.c_str(), 0,1,384,1,1); 
-std::string depthwise_conv2d_11_w_path =  dir_prefix + std::string("depthwise_conv2d_11_w.bin"); 
-void* depthwise_conv2d_11_w =  readTrainedWeights(depthwise_conv2d_11_w_path.c_str(), 0,384,1,3,3); 
-std::string batch_normalization_31_gamma_path =  dir_prefix + std::string("batch_normalization_31_gamma.bin"); 
-void* batch_normalization_31_gamma =  readTrainedWeights(batch_normalization_31_gamma_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_beta_path =  dir_prefix + std::string("batch_normalization_31_beta.bin"); 
-void* batch_normalization_31_beta =  readTrainedWeights(batch_normalization_31_beta_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_mean_path =  dir_prefix + std::string("batch_normalization_31_mean.bin"); 
-void* batch_normalization_31_mean =  readTrainedWeights(batch_normalization_31_mean_path.c_str(), 0,1,384,1,1); 
-std::string batch_normalization_31_variance_path =  dir_prefix + std::string("batch_normalization_31_variance.bin"); 
-void* batch_normalization_31_variance =  readTrainedWeights(batch_normalization_31_variance_path.c_str(), 0,1,384,1,1); 
-std::string conv2d_22_w_path =  dir_prefix + std::string("conv2d_22_w.bin"); 
-void* conv2d_22_w =  readTrainedWeights(conv2d_22_w_path.c_str(), 0,96,384,1,1); 
-std::string batch_normalization_32_gamma_path =  dir_prefix + std::string("batch_normalization_32_gamma.bin"); 
-void* batch_normalization_32_gamma =  readTrainedWeights(batch_normalization_32_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_beta_path =  dir_prefix + std::string("batch_normalization_32_beta.bin"); 
-void* batch_normalization_32_beta =  readTrainedWeights(batch_normalization_32_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_mean_path =  dir_prefix + std::string("batch_normalization_32_mean.bin"); 
-void* batch_normalization_32_mean =  readTrainedWeights(batch_normalization_32_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_32_variance_path =  dir_prefix + std::string("batch_normalization_32_variance.bin"); 
-void* batch_normalization_32_variance =  readTrainedWeights(batch_normalization_32_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_23_w_path =  dir_prefix + std::string("conv2d_23_w.bin"); 
-void* conv2d_23_w =  readTrainedWeights(conv2d_23_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_33_gamma_path =  dir_prefix + std::string("batch_normalization_33_gamma.bin"); 
-void* batch_normalization_33_gamma =  readTrainedWeights(batch_normalization_33_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_beta_path =  dir_prefix + std::string("batch_normalization_33_beta.bin"); 
-void* batch_normalization_33_beta =  readTrainedWeights(batch_normalization_33_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_mean_path =  dir_prefix + std::string("batch_normalization_33_mean.bin"); 
-void* batch_normalization_33_mean =  readTrainedWeights(batch_normalization_33_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_33_variance_path =  dir_prefix + std::string("batch_normalization_33_variance.bin"); 
-void* batch_normalization_33_variance =  readTrainedWeights(batch_normalization_33_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_12_w_path =  dir_prefix + std::string("depthwise_conv2d_12_w.bin"); 
-void* depthwise_conv2d_12_w =  readTrainedWeights(depthwise_conv2d_12_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_34_gamma_path =  dir_prefix + std::string("batch_normalization_34_gamma.bin"); 
-void* batch_normalization_34_gamma =  readTrainedWeights(batch_normalization_34_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_beta_path =  dir_prefix + std::string("batch_normalization_34_beta.bin"); 
-void* batch_normalization_34_beta =  readTrainedWeights(batch_normalization_34_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_mean_path =  dir_prefix + std::string("batch_normalization_34_mean.bin"); 
-void* batch_normalization_34_mean =  readTrainedWeights(batch_normalization_34_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_34_variance_path =  dir_prefix + std::string("batch_normalization_34_variance.bin"); 
-void* batch_normalization_34_variance =  readTrainedWeights(batch_normalization_34_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_24_w_path =  dir_prefix + std::string("conv2d_24_w.bin"); 
-void* conv2d_24_w =  readTrainedWeights(conv2d_24_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_35_gamma_path =  dir_prefix + std::string("batch_normalization_35_gamma.bin"); 
-void* batch_normalization_35_gamma =  readTrainedWeights(batch_normalization_35_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_beta_path =  dir_prefix + std::string("batch_normalization_35_beta.bin"); 
-void* batch_normalization_35_beta =  readTrainedWeights(batch_normalization_35_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_mean_path =  dir_prefix + std::string("batch_normalization_35_mean.bin"); 
-void* batch_normalization_35_mean =  readTrainedWeights(batch_normalization_35_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_35_variance_path =  dir_prefix + std::string("batch_normalization_35_variance.bin"); 
-void* batch_normalization_35_variance =  readTrainedWeights(batch_normalization_35_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_25_w_path =  dir_prefix + std::string("conv2d_25_w.bin"); 
-void* conv2d_25_w =  readTrainedWeights(conv2d_25_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_36_gamma_path =  dir_prefix + std::string("batch_normalization_36_gamma.bin"); 
-void* batch_normalization_36_gamma =  readTrainedWeights(batch_normalization_36_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_beta_path =  dir_prefix + std::string("batch_normalization_36_beta.bin"); 
-void* batch_normalization_36_beta =  readTrainedWeights(batch_normalization_36_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_mean_path =  dir_prefix + std::string("batch_normalization_36_mean.bin"); 
-void* batch_normalization_36_mean =  readTrainedWeights(batch_normalization_36_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_36_variance_path =  dir_prefix + std::string("batch_normalization_36_variance.bin"); 
-void* batch_normalization_36_variance =  readTrainedWeights(batch_normalization_36_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_13_w_path =  dir_prefix + std::string("depthwise_conv2d_13_w.bin"); 
-void* depthwise_conv2d_13_w =  readTrainedWeights(depthwise_conv2d_13_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_37_gamma_path =  dir_prefix + std::string("batch_normalization_37_gamma.bin"); 
-void* batch_normalization_37_gamma =  readTrainedWeights(batch_normalization_37_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_beta_path =  dir_prefix + std::string("batch_normalization_37_beta.bin"); 
-void* batch_normalization_37_beta =  readTrainedWeights(batch_normalization_37_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_mean_path =  dir_prefix + std::string("batch_normalization_37_mean.bin"); 
-void* batch_normalization_37_mean =  readTrainedWeights(batch_normalization_37_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_37_variance_path =  dir_prefix + std::string("batch_normalization_37_variance.bin"); 
-void* batch_normalization_37_variance =  readTrainedWeights(batch_normalization_37_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_26_w_path =  dir_prefix + std::string("conv2d_26_w.bin"); 
-void* conv2d_26_w =  readTrainedWeights(conv2d_26_w_path.c_str(), 0,96,576,1,1); 
-std::string batch_normalization_38_gamma_path =  dir_prefix + std::string("batch_normalization_38_gamma.bin"); 
-void* batch_normalization_38_gamma =  readTrainedWeights(batch_normalization_38_gamma_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_beta_path =  dir_prefix + std::string("batch_normalization_38_beta.bin"); 
-void* batch_normalization_38_beta =  readTrainedWeights(batch_normalization_38_beta_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_mean_path =  dir_prefix + std::string("batch_normalization_38_mean.bin"); 
-void* batch_normalization_38_mean =  readTrainedWeights(batch_normalization_38_mean_path.c_str(), 0,1,96,1,1); 
-std::string batch_normalization_38_variance_path =  dir_prefix + std::string("batch_normalization_38_variance.bin"); 
-void* batch_normalization_38_variance =  readTrainedWeights(batch_normalization_38_variance_path.c_str(), 0,1,96,1,1); 
-std::string conv2d_27_w_path =  dir_prefix + std::string("conv2d_27_w.bin"); 
-void* conv2d_27_w =  readTrainedWeights(conv2d_27_w_path.c_str(), 0,576,96,1,1); 
-std::string batch_normalization_39_gamma_path =  dir_prefix + std::string("batch_normalization_39_gamma.bin"); 
-void* batch_normalization_39_gamma =  readTrainedWeights(batch_normalization_39_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_beta_path =  dir_prefix + std::string("batch_normalization_39_beta.bin"); 
-void* batch_normalization_39_beta =  readTrainedWeights(batch_normalization_39_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_mean_path =  dir_prefix + std::string("batch_normalization_39_mean.bin"); 
-void* batch_normalization_39_mean =  readTrainedWeights(batch_normalization_39_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_39_variance_path =  dir_prefix + std::string("batch_normalization_39_variance.bin"); 
-void* batch_normalization_39_variance =  readTrainedWeights(batch_normalization_39_variance_path.c_str(), 0,1,576,1,1); 
-std::string depthwise_conv2d_14_w_path =  dir_prefix + std::string("depthwise_conv2d_14_w.bin"); 
-void* depthwise_conv2d_14_w =  readTrainedWeights(depthwise_conv2d_14_w_path.c_str(), 0,576,1,3,3); 
-std::string batch_normalization_40_gamma_path =  dir_prefix + std::string("batch_normalization_40_gamma.bin"); 
-void* batch_normalization_40_gamma =  readTrainedWeights(batch_normalization_40_gamma_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_beta_path =  dir_prefix + std::string("batch_normalization_40_beta.bin"); 
-void* batch_normalization_40_beta =  readTrainedWeights(batch_normalization_40_beta_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_mean_path =  dir_prefix + std::string("batch_normalization_40_mean.bin"); 
-void* batch_normalization_40_mean =  readTrainedWeights(batch_normalization_40_mean_path.c_str(), 0,1,576,1,1); 
-std::string batch_normalization_40_variance_path =  dir_prefix + std::string("batch_normalization_40_variance.bin"); 
-void* batch_normalization_40_variance =  readTrainedWeights(batch_normalization_40_variance_path.c_str(), 0,1,576,1,1); 
-std::string conv2d_28_w_path =  dir_prefix + std::string("conv2d_28_w.bin"); 
-void* conv2d_28_w =  readTrainedWeights(conv2d_28_w_path.c_str(), 0,160,576,1,1); 
-std::string batch_normalization_41_gamma_path =  dir_prefix + std::string("batch_normalization_41_gamma.bin"); 
-void* batch_normalization_41_gamma =  readTrainedWeights(batch_normalization_41_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_beta_path =  dir_prefix + std::string("batch_normalization_41_beta.bin"); 
-void* batch_normalization_41_beta =  readTrainedWeights(batch_normalization_41_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_mean_path =  dir_prefix + std::string("batch_normalization_41_mean.bin"); 
-void* batch_normalization_41_mean =  readTrainedWeights(batch_normalization_41_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_41_variance_path =  dir_prefix + std::string("batch_normalization_41_variance.bin"); 
-void* batch_normalization_41_variance =  readTrainedWeights(batch_normalization_41_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_29_w_path =  dir_prefix + std::string("conv2d_29_w.bin"); 
-void* conv2d_29_w =  readTrainedWeights(conv2d_29_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_42_gamma_path =  dir_prefix + std::string("batch_normalization_42_gamma.bin"); 
-void* batch_normalization_42_gamma =  readTrainedWeights(batch_normalization_42_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_beta_path =  dir_prefix + std::string("batch_normalization_42_beta.bin"); 
-void* batch_normalization_42_beta =  readTrainedWeights(batch_normalization_42_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_mean_path =  dir_prefix + std::string("batch_normalization_42_mean.bin"); 
-void* batch_normalization_42_mean =  readTrainedWeights(batch_normalization_42_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_42_variance_path =  dir_prefix + std::string("batch_normalization_42_variance.bin"); 
-void* batch_normalization_42_variance =  readTrainedWeights(batch_normalization_42_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_15_w_path =  dir_prefix + std::string("depthwise_conv2d_15_w.bin"); 
-void* depthwise_conv2d_15_w =  readTrainedWeights(depthwise_conv2d_15_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_43_gamma_path =  dir_prefix + std::string("batch_normalization_43_gamma.bin"); 
-void* batch_normalization_43_gamma =  readTrainedWeights(batch_normalization_43_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_beta_path =  dir_prefix + std::string("batch_normalization_43_beta.bin"); 
-void* batch_normalization_43_beta =  readTrainedWeights(batch_normalization_43_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_mean_path =  dir_prefix + std::string("batch_normalization_43_mean.bin"); 
-void* batch_normalization_43_mean =  readTrainedWeights(batch_normalization_43_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_43_variance_path =  dir_prefix + std::string("batch_normalization_43_variance.bin"); 
-void* batch_normalization_43_variance =  readTrainedWeights(batch_normalization_43_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_30_w_path =  dir_prefix + std::string("conv2d_30_w.bin"); 
-void* conv2d_30_w =  readTrainedWeights(conv2d_30_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_44_gamma_path =  dir_prefix + std::string("batch_normalization_44_gamma.bin"); 
-void* batch_normalization_44_gamma =  readTrainedWeights(batch_normalization_44_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_beta_path =  dir_prefix + std::string("batch_normalization_44_beta.bin"); 
-void* batch_normalization_44_beta =  readTrainedWeights(batch_normalization_44_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_mean_path =  dir_prefix + std::string("batch_normalization_44_mean.bin"); 
-void* batch_normalization_44_mean =  readTrainedWeights(batch_normalization_44_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_44_variance_path =  dir_prefix + std::string("batch_normalization_44_variance.bin"); 
-void* batch_normalization_44_variance =  readTrainedWeights(batch_normalization_44_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_31_w_path =  dir_prefix + std::string("conv2d_31_w.bin"); 
-void* conv2d_31_w =  readTrainedWeights(conv2d_31_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_45_gamma_path =  dir_prefix + std::string("batch_normalization_45_gamma.bin"); 
-void* batch_normalization_45_gamma =  readTrainedWeights(batch_normalization_45_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_beta_path =  dir_prefix + std::string("batch_normalization_45_beta.bin"); 
-void* batch_normalization_45_beta =  readTrainedWeights(batch_normalization_45_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_mean_path =  dir_prefix + std::string("batch_normalization_45_mean.bin"); 
-void* batch_normalization_45_mean =  readTrainedWeights(batch_normalization_45_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_45_variance_path =  dir_prefix + std::string("batch_normalization_45_variance.bin"); 
-void* batch_normalization_45_variance =  readTrainedWeights(batch_normalization_45_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_16_w_path =  dir_prefix + std::string("depthwise_conv2d_16_w.bin"); 
-void* depthwise_conv2d_16_w =  readTrainedWeights(depthwise_conv2d_16_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_46_gamma_path =  dir_prefix + std::string("batch_normalization_46_gamma.bin"); 
-void* batch_normalization_46_gamma =  readTrainedWeights(batch_normalization_46_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_beta_path =  dir_prefix + std::string("batch_normalization_46_beta.bin"); 
-void* batch_normalization_46_beta =  readTrainedWeights(batch_normalization_46_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_mean_path =  dir_prefix + std::string("batch_normalization_46_mean.bin"); 
-void* batch_normalization_46_mean =  readTrainedWeights(batch_normalization_46_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_46_variance_path =  dir_prefix + std::string("batch_normalization_46_variance.bin"); 
-void* batch_normalization_46_variance =  readTrainedWeights(batch_normalization_46_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_32_w_path =  dir_prefix + std::string("conv2d_32_w.bin"); 
-void* conv2d_32_w =  readTrainedWeights(conv2d_32_w_path.c_str(), 0,160,960,1,1); 
-std::string batch_normalization_47_gamma_path =  dir_prefix + std::string("batch_normalization_47_gamma.bin"); 
-void* batch_normalization_47_gamma =  readTrainedWeights(batch_normalization_47_gamma_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_beta_path =  dir_prefix + std::string("batch_normalization_47_beta.bin"); 
-void* batch_normalization_47_beta =  readTrainedWeights(batch_normalization_47_beta_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_mean_path =  dir_prefix + std::string("batch_normalization_47_mean.bin"); 
-void* batch_normalization_47_mean =  readTrainedWeights(batch_normalization_47_mean_path.c_str(), 0,1,160,1,1); 
-std::string batch_normalization_47_variance_path =  dir_prefix + std::string("batch_normalization_47_variance.bin"); 
-void* batch_normalization_47_variance =  readTrainedWeights(batch_normalization_47_variance_path.c_str(), 0,1,160,1,1); 
-std::string conv2d_33_w_path =  dir_prefix + std::string("conv2d_33_w.bin"); 
-void* conv2d_33_w =  readTrainedWeights(conv2d_33_w_path.c_str(), 0,960,160,1,1); 
-std::string batch_normalization_48_gamma_path =  dir_prefix + std::string("batch_normalization_48_gamma.bin"); 
-void* batch_normalization_48_gamma =  readTrainedWeights(batch_normalization_48_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_beta_path =  dir_prefix + std::string("batch_normalization_48_beta.bin"); 
-void* batch_normalization_48_beta =  readTrainedWeights(batch_normalization_48_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_mean_path =  dir_prefix + std::string("batch_normalization_48_mean.bin"); 
-void* batch_normalization_48_mean =  readTrainedWeights(batch_normalization_48_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_48_variance_path =  dir_prefix + std::string("batch_normalization_48_variance.bin"); 
-void* batch_normalization_48_variance =  readTrainedWeights(batch_normalization_48_variance_path.c_str(), 0,1,960,1,1); 
-std::string depthwise_conv2d_17_w_path =  dir_prefix + std::string("depthwise_conv2d_17_w.bin"); 
-void* depthwise_conv2d_17_w =  readTrainedWeights(depthwise_conv2d_17_w_path.c_str(), 0,960,1,3,3); 
-std::string batch_normalization_49_gamma_path =  dir_prefix + std::string("batch_normalization_49_gamma.bin"); 
-void* batch_normalization_49_gamma =  readTrainedWeights(batch_normalization_49_gamma_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_beta_path =  dir_prefix + std::string("batch_normalization_49_beta.bin"); 
-void* batch_normalization_49_beta =  readTrainedWeights(batch_normalization_49_beta_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_mean_path =  dir_prefix + std::string("batch_normalization_49_mean.bin"); 
-void* batch_normalization_49_mean =  readTrainedWeights(batch_normalization_49_mean_path.c_str(), 0,1,960,1,1); 
-std::string batch_normalization_49_variance_path =  dir_prefix + std::string("batch_normalization_49_variance.bin"); 
-void* batch_normalization_49_variance =  readTrainedWeights(batch_normalization_49_variance_path.c_str(), 0,1,960,1,1); 
-std::string conv2d_34_w_path =  dir_prefix + std::string("conv2d_34_w.bin"); 
-void* conv2d_34_w =  readTrainedWeights(conv2d_34_w_path.c_str(), 0,320,960,1,1); 
-std::string batch_normalization_50_gamma_path =  dir_prefix + std::string("batch_normalization_50_gamma.bin"); 
-void* batch_normalization_50_gamma =  readTrainedWeights(batch_normalization_50_gamma_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_beta_path =  dir_prefix + std::string("batch_normalization_50_beta.bin"); 
-void* batch_normalization_50_beta =  readTrainedWeights(batch_normalization_50_beta_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_mean_path =  dir_prefix + std::string("batch_normalization_50_mean.bin"); 
-void* batch_normalization_50_mean =  readTrainedWeights(batch_normalization_50_mean_path.c_str(), 0,1,320,1,1); 
-std::string batch_normalization_50_variance_path =  dir_prefix + std::string("batch_normalization_50_variance.bin"); 
-void* batch_normalization_50_variance =  readTrainedWeights(batch_normalization_50_variance_path.c_str(), 0,1,320,1,1); 
-std::string conv2d_35_w_path =  dir_prefix + std::string("conv2d_35_w.bin"); 
-void* conv2d_35_w =  readTrainedWeights(conv2d_35_w_path.c_str(), 0,1280,320,1,1); 
-std::string batch_normalization_51_gamma_path =  dir_prefix + std::string("batch_normalization_51_gamma.bin"); 
-void* batch_normalization_51_gamma =  readTrainedWeights(batch_normalization_51_gamma_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_beta_path =  dir_prefix + std::string("batch_normalization_51_beta.bin"); 
-void* batch_normalization_51_beta =  readTrainedWeights(batch_normalization_51_beta_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_mean_path =  dir_prefix + std::string("batch_normalization_51_mean.bin"); 
-void* batch_normalization_51_mean =  readTrainedWeights(batch_normalization_51_mean_path.c_str(), 0,1,1280,1,1); 
-std::string batch_normalization_51_variance_path =  dir_prefix + std::string("batch_normalization_51_variance.bin"); 
-void* batch_normalization_51_variance =  readTrainedWeights(batch_normalization_51_variance_path.c_str(), 0,1,1280,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,5120,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-
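Every weight above is loaded with the same two-line pattern: build the file path, then call readTrainedWeights with the tensor's NCHW dimensions. A small helper makes the repeated idiom explicit; this is a sketch only, and loadWeights is a hypothetical name, not part of the original runtime API:

// Sketch: factors out the repeated path-build + readTrainedWeights calls above.
// loadWeights is a hypothetical helper, not an existing runtime function.
static void* loadWeights(const std::string& dir_prefix, const char* name,
                         int n, int c, int h, int w) {
  std::string path = dir_prefix + name;
  return readTrainedWeights(path.c_str(), 0, n, c, h, w);
}

// Usage, equivalent to the two-line pattern above:
//   void* conv2d_13_w = loadWeights(dir_prefix, "conv2d_13_w.bin", 192, 32, 1, 1);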
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 1); 
-void* var_4 = tensorConvolution(var_2, depthwise_conv2d_1_w, 1, 1, 1, 1, 1, 32); 
-void* var_5 = tensorBatchNorm(var_4, batch_normalization_1_gamma, batch_normalization_1_beta, batch_normalization_1_mean, batch_normalization_1_variance, 0.001); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorConvolution(var_6, conv2d_2_w, 0, 0, 1, 1, 1, 1); 
-void* var_8 = tensorBatchNorm(var_7, batch_normalization_2_gamma, batch_normalization_2_beta, batch_normalization_2_mean, batch_normalization_2_variance, 0.001); 
-void* var_9 = tensorConvolution(var_8, conv2d_3_w, 0, 0, 1, 1, 1, 1); 
-void* var_10 = tensorBatchNorm(var_9, batch_normalization_3_gamma, batch_normalization_3_beta, batch_normalization_3_mean, batch_normalization_3_variance, 0.001); 
-void* var_11 = tensorRelu(var_10); 
-void* var_13 = tensorConvolution(var_11, depthwise_conv2d_2_w, 1, 1, 1, 1, 1, 96); 
-void* var_14 = tensorBatchNorm(var_13, batch_normalization_4_gamma, batch_normalization_4_beta, batch_normalization_4_mean, batch_normalization_4_variance, 0.001); 
-void* var_15 = tensorRelu(var_14); 
-void* var_16 = tensorConvolution(var_15, conv2d_4_w, 0, 0, 1, 1, 1, 1); 
-void* var_17 = tensorBatchNorm(var_16, batch_normalization_5_gamma, batch_normalization_5_beta, batch_normalization_5_mean, batch_normalization_5_variance, 0.001); 
-void* var_18 = tensorConvolution(var_17, conv2d_5_w, 0, 0, 1, 1, 1, 1); 
-void* var_19 = tensorBatchNorm(var_18, batch_normalization_6_gamma, batch_normalization_6_beta, batch_normalization_6_mean, batch_normalization_6_variance, 0.001); 
-void* var_20 = tensorRelu(var_19); 
-void* var_22 = tensorConvolution(var_20, depthwise_conv2d_3_w, 1, 1, 1, 1, 1, 144); 
-void* var_23 = tensorBatchNorm(var_22, batch_normalization_7_gamma, batch_normalization_7_beta, batch_normalization_7_mean, batch_normalization_7_variance, 0.001); 
-void* var_24 = tensorRelu(var_23); 
-void* var_25 = tensorConvolution(var_24, conv2d_6_w, 0, 0, 1, 1, 1, 1); 
-void* var_26 = tensorBatchNorm(var_25, batch_normalization_8_gamma, batch_normalization_8_beta, batch_normalization_8_mean, batch_normalization_8_variance, 0.001); 
-void* var_27 = tensorAdd(var_17, var_26); 
-void* var_28 = tensorConvolution(var_27, conv2d_7_w, 0, 0, 1, 1, 1, 1); 
-void* var_29 = tensorBatchNorm(var_28, batch_normalization_9_gamma, batch_normalization_9_beta, batch_normalization_9_mean, batch_normalization_9_variance, 0.001); 
-void* var_30 = tensorRelu(var_29); 
-void* var_32 = tensorConvolution(var_30, depthwise_conv2d_4_w, 1, 1, 2, 2, 1, 144); 
-void* var_33 = tensorBatchNorm(var_32, batch_normalization_10_gamma, batch_normalization_10_beta, batch_normalization_10_mean, batch_normalization_10_variance, 0.001); 
-void* var_34 = tensorRelu(var_33); 
-void* var_35 = tensorConvolution(var_34, conv2d_8_w, 0, 0, 1, 1, 1, 1); 
-void* var_36 = tensorBatchNorm(var_35, batch_normalization_11_gamma, batch_normalization_11_beta, batch_normalization_11_mean, batch_normalization_11_variance, 0.001); 
-void* var_37 = tensorConvolution(var_36, conv2d_9_w, 0, 0, 1, 1, 1, 1); 
-void* var_38 = tensorBatchNorm(var_37, batch_normalization_12_gamma, batch_normalization_12_beta, batch_normalization_12_mean, batch_normalization_12_variance, 0.001); 
-void* var_39 = tensorRelu(var_38); 
-void* var_41 = tensorConvolution(var_39, depthwise_conv2d_5_w, 1, 1, 1, 1, 1, 192); 
-void* var_42 = tensorBatchNorm(var_41, batch_normalization_13_gamma, batch_normalization_13_beta, batch_normalization_13_mean, batch_normalization_13_variance, 0.001); 
-void* var_43 = tensorRelu(var_42); 
-void* var_44 = tensorConvolution(var_43, conv2d_10_w, 0, 0, 1, 1, 1, 1); 
-void* var_45 = tensorBatchNorm(var_44, batch_normalization_14_gamma, batch_normalization_14_beta, batch_normalization_14_mean, batch_normalization_14_variance, 0.001); 
-void* var_46 = tensorAdd(var_36, var_45); 
-void* var_47 = tensorConvolution(var_46, conv2d_11_w, 0, 0, 1, 1, 1, 1); 
-void* var_48 = tensorBatchNorm(var_47, batch_normalization_15_gamma, batch_normalization_15_beta, batch_normalization_15_mean, batch_normalization_15_variance, 0.001); 
-void* var_49 = tensorRelu(var_48); 
-void* var_51 = tensorConvolution(var_49, depthwise_conv2d_6_w, 1, 1, 1, 1, 1, 192); 
-void* var_52 = tensorBatchNorm(var_51, batch_normalization_16_gamma, batch_normalization_16_beta, batch_normalization_16_mean, batch_normalization_16_variance, 0.001); 
-void* var_53 = tensorRelu(var_52); 
-void* var_54 = tensorConvolution(var_53, conv2d_12_w, 0, 0, 1, 1, 1, 1); 
-void* var_55 = tensorBatchNorm(var_54, batch_normalization_17_gamma, batch_normalization_17_beta, batch_normalization_17_mean, batch_normalization_17_variance, 0.001); 
-void* var_56 = tensorAdd(var_46, var_55); 
-void* var_57 = tensorConvolution(var_56, conv2d_13_w, 0, 0, 1, 1, 1, 1); 
-void* var_58 = tensorBatchNorm(var_57, batch_normalization_18_gamma, batch_normalization_18_beta, batch_normalization_18_mean, batch_normalization_18_variance, 0.001); 
-void* var_59 = tensorRelu(var_58); 
-void* var_61 = tensorConvolution(var_59, depthwise_conv2d_7_w, 1, 1, 2, 2, 1, 192); 
-void* var_62 = tensorBatchNorm(var_61, batch_normalization_19_gamma, batch_normalization_19_beta, batch_normalization_19_mean, batch_normalization_19_variance, 0.001); 
-void* var_63 = tensorRelu(var_62); 
-void* var_64 = tensorConvolution(var_63, conv2d_14_w, 0, 0, 1, 1, 1, 1); 
-void* var_65 = tensorBatchNorm(var_64, batch_normalization_20_gamma, batch_normalization_20_beta, batch_normalization_20_mean, batch_normalization_20_variance, 0.001); 
-void* var_66 = tensorConvolution(var_65, conv2d_15_w, 0, 0, 1, 1, 1, 1); 
-void* var_67 = tensorBatchNorm(var_66, batch_normalization_21_gamma, batch_normalization_21_beta, batch_normalization_21_mean, batch_normalization_21_variance, 0.001); 
-void* var_68 = tensorRelu(var_67); 
-void* var_70 = tensorConvolution(var_68, depthwise_conv2d_8_w, 1, 1, 1, 1, 1, 384); 
-void* var_71 = tensorBatchNorm(var_70, batch_normalization_22_gamma, batch_normalization_22_beta, batch_normalization_22_mean, batch_normalization_22_variance, 0.001); 
-void* var_72 = tensorRelu(var_71); 
-void* var_73 = tensorConvolution(var_72, conv2d_16_w, 0, 0, 1, 1, 1, 1); 
-void* var_74 = tensorBatchNorm(var_73, batch_normalization_23_gamma, batch_normalization_23_beta, batch_normalization_23_mean, batch_normalization_23_variance, 0.001); 
-void* var_75 = tensorAdd(var_65, var_74); 
-void* var_76 = tensorConvolution(var_75, conv2d_17_w, 0, 0, 1, 1, 1, 1); 
-void* var_77 = tensorBatchNorm(var_76, batch_normalization_24_gamma, batch_normalization_24_beta, batch_normalization_24_mean, batch_normalization_24_variance, 0.001); 
-void* var_78 = tensorRelu(var_77); 
-void* var_80 = tensorConvolution(var_78, depthwise_conv2d_9_w, 1, 1, 1, 1, 1, 384); 
-void* var_81 = tensorBatchNorm(var_80, batch_normalization_25_gamma, batch_normalization_25_beta, batch_normalization_25_mean, batch_normalization_25_variance, 0.001); 
-void* var_82 = tensorRelu(var_81); 
-void* var_83 = tensorConvolution(var_82, conv2d_18_w, 0, 0, 1, 1, 1, 1); 
-void* var_84 = tensorBatchNorm(var_83, batch_normalization_26_gamma, batch_normalization_26_beta, batch_normalization_26_mean, batch_normalization_26_variance, 0.001); 
-void* var_85 = tensorAdd(var_75, var_84); 
-void* var_86 = tensorConvolution(var_85, conv2d_19_w, 0, 0, 1, 1, 1, 1); 
-void* var_87 = tensorBatchNorm(var_86, batch_normalization_27_gamma, batch_normalization_27_beta, batch_normalization_27_mean, batch_normalization_27_variance, 0.001); 
-void* var_88 = tensorRelu(var_87); 
-void* var_90 = tensorConvolution(var_88, depthwise_conv2d_10_w, 1, 1, 1, 1, 1, 384); 
-void* var_91 = tensorBatchNorm(var_90, batch_normalization_28_gamma, batch_normalization_28_beta, batch_normalization_28_mean, batch_normalization_28_variance, 0.001); 
-void* var_92 = tensorRelu(var_91); 
-void* var_93 = tensorConvolution(var_92, conv2d_20_w, 0, 0, 1, 1, 1, 1); 
-void* var_94 = tensorBatchNorm(var_93, batch_normalization_29_gamma, batch_normalization_29_beta, batch_normalization_29_mean, batch_normalization_29_variance, 0.001); 
-void* var_95 = tensorAdd(var_85, var_94); 
-void* var_97 = tensorConvolution(var_95, conv2d_21_w, 0, 0, 1, 1, 1, 1); 
-void* var_98 = tensorBatchNorm(var_97, batch_normalization_30_gamma, batch_normalization_30_beta, batch_normalization_30_mean, batch_normalization_30_variance, 0.001); 
-void* var_99 = tensorRelu(var_98); 
-void* var_101 = tensorConvolution(var_99, depthwise_conv2d_11_w, 1, 1, 1, 1, 1, 384); 
-void* var_102 = tensorBatchNorm(var_101, batch_normalization_31_gamma, batch_normalization_31_beta, batch_normalization_31_mean, batch_normalization_31_variance, 0.001); 
-void* var_103 = tensorRelu(var_102); 
-void* var_104 = tensorConvolution(var_103, conv2d_22_w, 0, 0, 1, 1, 1, 1); 
-void* var_105 = tensorBatchNorm(var_104, batch_normalization_32_gamma, batch_normalization_32_beta, batch_normalization_32_mean, batch_normalization_32_variance, 0.001); 
-void* var_106 = tensorConvolution(var_105, conv2d_23_w, 0, 0, 1, 1, 1, 1); 
-void* var_107 = tensorBatchNorm(var_106, batch_normalization_33_gamma, batch_normalization_33_beta, batch_normalization_33_mean, batch_normalization_33_variance, 0.001); 
-void* var_108 = tensorRelu(var_107); 
-void* var_110 = tensorConvolution(var_108, depthwise_conv2d_12_w, 1, 1, 1, 1, 1, 576); 
-void* var_111 = tensorBatchNorm(var_110, batch_normalization_34_gamma, batch_normalization_34_beta, batch_normalization_34_mean, batch_normalization_34_variance, 0.001); 
-void* var_112 = tensorRelu(var_111); 
-void* var_113 = tensorConvolution(var_112, conv2d_24_w, 0, 0, 1, 1, 1, 1); 
-void* var_114 = tensorBatchNorm(var_113, batch_normalization_35_gamma, batch_normalization_35_beta, batch_normalization_35_mean, batch_normalization_35_variance, 0.001); 
-void* var_115 = tensorAdd(var_105, var_114); 
-void* var_116 = tensorConvolution(var_115, conv2d_25_w, 0, 0, 1, 1, 1, 1); 
-void* var_117 = tensorBatchNorm(var_116, batch_normalization_36_gamma, batch_normalization_36_beta, batch_normalization_36_mean, batch_normalization_36_variance, 0.001); 
-void* var_118 = tensorRelu(var_117); 
-void* var_120 = tensorConvolution(var_118, depthwise_conv2d_13_w, 1, 1, 1, 1, 1, 576); 
-void* var_121 = tensorBatchNorm(var_120, batch_normalization_37_gamma, batch_normalization_37_beta, batch_normalization_37_mean, batch_normalization_37_variance, 0.001); 
-void* var_122 = tensorRelu(var_121); 
-void* var_123 = tensorConvolution(var_122, conv2d_26_w, 0, 0, 1, 1, 1, 1); 
-void* var_124 = tensorBatchNorm(var_123, batch_normalization_38_gamma, batch_normalization_38_beta, batch_normalization_38_mean, batch_normalization_38_variance, 0.001); 
-void* var_125 = tensorAdd(var_115, var_124); 
-void* var_127 = tensorConvolution(var_125, conv2d_27_w, 0, 0, 1, 1, 1, 1); 
-void* var_128 = tensorBatchNorm(var_127, batch_normalization_39_gamma, batch_normalization_39_beta, batch_normalization_39_mean, batch_normalization_39_variance, 0.001); 
-void* var_129 = tensorRelu(var_128); 
-void* var_131 = tensorConvolution(var_129, depthwise_conv2d_14_w, 1, 1, 2, 2, 1, 576); 
-void* var_132 = tensorBatchNorm(var_131, batch_normalization_40_gamma, batch_normalization_40_beta, batch_normalization_40_mean, batch_normalization_40_variance, 0.001); 
-void* var_133 = tensorRelu(var_132); 
-void* var_134 = tensorConvolution(var_133, conv2d_28_w, 0, 0, 1, 1, 1, 1); 
-void* var_135 = tensorBatchNorm(var_134, batch_normalization_41_gamma, batch_normalization_41_beta, batch_normalization_41_mean, batch_normalization_41_variance, 0.001); 
-void* var_136 = tensorConvolution(var_135, conv2d_29_w, 0, 0, 1, 1, 1, 1); 
-void* var_137 = tensorBatchNorm(var_136, batch_normalization_42_gamma, batch_normalization_42_beta, batch_normalization_42_mean, batch_normalization_42_variance, 0.001); 
-void* var_138 = tensorRelu(var_137); 
-void* var_140 = tensorConvolution(var_138, depthwise_conv2d_15_w, 1, 1, 1, 1, 1, 960); 
-void* var_141 = tensorBatchNorm(var_140, batch_normalization_43_gamma, batch_normalization_43_beta, batch_normalization_43_mean, batch_normalization_43_variance, 0.001); 
-void* var_142 = tensorRelu(var_141); 
-void* var_143 = tensorConvolution(var_142, conv2d_30_w, 0, 0, 1, 1, 1, 1); 
-void* var_144 = tensorBatchNorm(var_143, batch_normalization_44_gamma, batch_normalization_44_beta, batch_normalization_44_mean, batch_normalization_44_variance, 0.001); 
-void* var_145 = tensorAdd(var_135, var_144); 
-void* var_146 = tensorConvolution(var_145, conv2d_31_w, 0, 0, 1, 1, 1, 1); 
-void* var_147 = tensorBatchNorm(var_146, batch_normalization_45_gamma, batch_normalization_45_beta, batch_normalization_45_mean, batch_normalization_45_variance, 0.001); 
-void* var_148 = tensorRelu(var_147); 
-void* var_150 = tensorConvolution(var_148, depthwise_conv2d_16_w, 1, 1, 1, 1, 1, 960); 
-void* var_151 = tensorBatchNorm(var_150, batch_normalization_46_gamma, batch_normalization_46_beta, batch_normalization_46_mean, batch_normalization_46_variance, 0.001); 
-void* var_152 = tensorRelu(var_151); 
-void* var_153 = tensorConvolution(var_152, conv2d_32_w, 0, 0, 1, 1, 1, 1); 
-void* var_154 = tensorBatchNorm(var_153, batch_normalization_47_gamma, batch_normalization_47_beta, batch_normalization_47_mean, batch_normalization_47_variance, 0.001); 
-void* var_155 = tensorAdd(var_145, var_154); 
-void* var_157 = tensorConvolution(var_155, conv2d_33_w, 0, 0, 1, 1, 1, 1); 
-void* var_158 = tensorBatchNorm(var_157, batch_normalization_48_gamma, batch_normalization_48_beta, batch_normalization_48_mean, batch_normalization_48_variance, 0.001); 
-void* var_159 = tensorRelu(var_158); 
-void* var_161 = tensorConvolution(var_159, depthwise_conv2d_17_w, 1, 1, 1, 1, 1, 960); 
-void* var_162 = tensorBatchNorm(var_161, batch_normalization_49_gamma, batch_normalization_49_beta, batch_normalization_49_mean, batch_normalization_49_variance, 0.001); 
-void* var_163 = tensorRelu(var_162); 
-void* var_164 = tensorConvolution(var_163, conv2d_34_w, 0, 0, 1, 1, 1, 1); 
-void* var_165 = tensorBatchNorm(var_164, batch_normalization_50_gamma, batch_normalization_50_beta, batch_normalization_50_mean, batch_normalization_50_variance, 0.001); 
-void* var_167 = tensorConvolution(var_165, conv2d_35_w, 0, 0, 1, 1, 1, 1); 
-void* var_168 = tensorBatchNorm(var_167, batch_normalization_51_gamma, batch_normalization_51_beta, batch_normalization_51_mean, batch_normalization_51_variance, 0.001); 
-void* var_169 = tensorRelu(var_168); 
-void* var_170 = tensorPooling(var_169,1,2,2,0,0,2,2); 
-void* var_172 = tensorGemmGPU(var_170, dense_1_w); 
-void* var_173 = tensorAdd(var_172, dense_1_b); 
-void* var_174 = tensorSoftmax(var_173); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_174); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
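The loop body above repeats the MobileNetV2 inverted-residual pattern: a 1x1 expansion convolution, batch norm, and ReLU; a 3x3 depthwise convolution (grouped per channel, stride 1 or 2), batch norm, and ReLU; a 1x1 projection convolution and batch norm; and, when the stride is 1 and the channel counts match, a residual add with the block input. A hedged sketch of one such block using the same runtime calls as above (the struct and function names here are illustrative, not part of the original sources):

struct BNParams { void* gamma; void* beta; void* mean; void* variance; };

static void* batchNorm(void* t, const BNParams& bn) {
  return tensorBatchNorm(t, bn.gamma, bn.beta, bn.mean, bn.variance, 0.001);
}

// One inverted-residual block, matching the repeated sequence above.
// `channels` is the depthwise group count (the expanded channel width).
static void* invertedResidual(void* in,
                              void* expand_w, const BNParams& bn1,
                              void* dw_w, int stride, int channels,
                              const BNParams& bn2,
                              void* project_w, const BNParams& bn3,
                              bool residual) {
  void* x = tensorConvolution(in, expand_w, 0, 0, 1, 1, 1, 1);       // 1x1 expand
  x = tensorRelu(batchNorm(x, bn1));
  x = tensorConvolution(x, dw_w, 1, 1, stride, stride, 1, channels); // 3x3 depthwise
  x = tensorRelu(batchNorm(x, bn2));
  x = tensorConvolution(x, project_w, 0, 0, 1, 1, 1, 1);             // 1x1 project
  x = batchNorm(x, bn3);
  return residual ? tensorAdd(in, x) : x;                            // skip connection
}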
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/Pipeline.py b/llvm/projects/hpvm-tensor-rt/model_params/pipeline/Pipeline.py
deleted file mode 100644
index 0b07a503d237965956801b410fd16fc5a128eb46..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/Pipeline.py
+++ /dev/null
@@ -1,207 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-
-# Applies four fixed 2D filter pipelines (combinations of Gaussian blur,
-# outline, sharpen, motion blur, and emboss) to the Caltech101 image set
-# and dumps each pipeline's output as a raw float32 binary.
-# Note: the script takes no command-line arguments; inputs are read from
-# dataset/ and filters/ below.
-
-import struct
-
-import cv2
-import numpy as np
-
-
-N = 9145
-H = 240
-W = 300
-
-
-images = np.fromfile("dataset/caltech101_255_float32.bin", dtype=np.float32)
-images = images.reshape(N, H, W)
-
-
-kernel_gaussian = np.fromfile("filters/GaussianFilter.bin", dtype = np.float32).reshape(9,9)
-kernel_outline = np.fromfile("filters/OutlineFilter.bin", dtype = np.float32).reshape(3,3)
-kernel_sharpen = np.fromfile("filters/SharpenFilter.bin", dtype = np.float32).reshape(3,3)
-kernel_motionblur = np.fromfile("filters/MotionblurFilter.bin", dtype = np.float32).reshape(9,9)
-kernel_emboss = np.fromfile("filters/EmbossFilter.bin", dtype = np.float32).reshape(5,5)
-bias_emboss = np.fromfile("filters/EmbossBias.bin", dtype = np.float32)
-
-
-print(kernel_gaussian.min(), kernel_gaussian.max())
-print(kernel_outline.min(), kernel_outline.max())
-print(kernel_sharpen.min(), kernel_sharpen.max())
-print(kernel_motionblur.min(), kernel_motionblur.max())
-print(kernel_emboss.min(), kernel_emboss.max())
-print(bias_emboss.min(), bias_emboss.max())
-
-
-# Gaussian blurring. kernel_gaussian is a precomputed 9x9 Gaussian kernel
-# (sigma = 1) loaded from filters/GaussianFilter.bin above.
-def Gaussian(img):
-    output = cv2.filter2D(img, -1, kernel_gaussian)
-    return np.clip(output, 0, 255)
-
-
-# Edge (outline) detection. kernel_outline is the standard 3x3
-# [[-1,-1,-1],[-1,8,-1],[-1,-1,-1]] kernel, loaded from filters/OutlineFilter.bin.
-def Outline(img):
-    output = cv2.filter2D(img, -1, kernel_outline)
-    return np.clip(output, 0, 255)
-
-
-# Sharpening. kernel_sharpen is the standard 3x3 [[0,-1,0],[-1,5,-1],[0,-1,0]]
-# kernel, loaded from filters/SharpenFilter.bin.
-def Sharpen(img):
-    output = cv2.filter2D(img, -1, kernel_sharpen)
-    return np.clip(output, 0, 255)
-
-
-# Motion blur along the main diagonal (top left to bottom right).
-# kernel_motionblur is a 9x9 identity matrix divided by 9.0, loaded from
-# filters/MotionblurFilter.bin.
-def MotionBlur(img):
-    output = cv2.filter2D(img, -1, kernel_motionblur)
-    return np.clip(output, 0, 255)
-
-
-# 5x5 45-degree emboss, for an engraving effect. kernel_emboss (a 5x5
-# diagonal ramp: -1s above the anti-diagonal, +1s below) and bias_emboss
-# are loaded from filters/EmbossFilter.bin and filters/EmbossBias.bin above.
-def Emboss5x5(img):
-    output = cv2.filter2D(img, -1, kernel_emboss) + bias_emboss
-    return np.clip(output, 0, 255)
-
-
-def dumpData(file_name, X_test):
-
-    print("*DumpData")
-    print("-min_val = ", np.amin(X_test))
-    print("-max_val = ", np.amax(X_test))
-
-    # Write the (N, C, H, W) tensor as raw float32 in row-major order --
-    # byte-for-byte what the original element-by-element struct loop produced.
-    X_test.astype(np.float32).tofile(file_name)
-
-
-def dumpConvWeights(file_name, weights, N, C, H, W):
-
-    print(weights.shape)
-    print("*DumpConvWeights")
-    print("-min_val = ", np.amin(weights))
-    print("-max_val = ", np.amax(weights))
-
-    # weights arrive in Keras (H, W, C, N) layout; emit raw float32 in
-    # (N, C, H, W) order, as the original quadruple loop did (with an
-    # explicit float32 cast added for safety).
-    weights.transpose(3, 2, 0, 1).astype(np.float32).tofile(file_name)
-
-
-def pipeline_G_O_M_E(img):
-    img = Gaussian(img)
-    img = Outline(img)
-    img = MotionBlur(img)
-    img = Emboss5x5(img)
-    return img
-
-
-def pipeline_G_S_M_E(img):
-    img = Gaussian(img)
-    img = Sharpen(img)
-    img = MotionBlur(img)
-    img = Emboss5x5(img)
-    return img
-
-
-def pipeline_G_E_O_M(img):
-    img = Gaussian(img)
-    img = Emboss5x5(img)
-    img = Outline(img)
-    img = MotionBlur(img)
-    return img
-
-
-def pipeline_G_E_S_M(img):
-    img = Gaussian(img)
-    img = Emboss5x5(img)
-    img = Sharpen(img)
-    img = MotionBlur(img)
-    return img
-
-
-def runPipeline(pipeline, images):
-    output = np.zeros_like(images)
-    for i in range(N):
-        output[i] = pipeline(images[i])
-    return output
-
-
-
-output_G_S_M_E = runPipeline(pipeline_G_S_M_E, images)
-output_G_S_M_E = output_G_S_M_E.reshape(N, 1, H, W)
-output_G_S_M_E.astype(np.float32).tofile("caltech-gaussian-sharpen-motionblur-emboss.bin")
-
-
-output_G_O_M_E = runPipeline(pipeline_G_O_M_E, images)
-output_G_O_M_E = output_G_O_M_E.reshape(N, 1, H, W)
-output_G_O_M_E.astype(np.float32).tofile("caltech-gaussian-outline-motionblur-emboss.bin")
-
-
-output_G_E_O_M = runPipeline(pipeline_G_E_O_M, images)
-output_G_E_O_M = output_G_E_O_M.reshape(N, 1, H, W)
-output_G_E_O_M.astype(np.float32).tofile("caltech-gaussian-emboss-outline-motionblur.bin")
-
-
-output_G_E_S_M = runPipeline(pipeline_G_E_S_M, images)
-output_G_E_S_M = output_G_E_S_M.reshape(N, 1, H, W)
-output_G_E_S_M.astype(np.float32).tofile("caltech-gaussian-emboss-sharpen-motionblur.bin")
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/dataset/.gitkeep b/llvm/projects/hpvm-tensor-rt/model_params/pipeline/dataset/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/filters/.gitkeep b/llvm/projects/hpvm-tensor-rt/model_params/pipeline/filters/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/getImage.py b/llvm/projects/hpvm-tensor-rt/model_params/pipeline/getImage.py
deleted file mode 100644
index 7b4984024e9c0188cab43705ae816c2ab60316c3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/getImage.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import sys
-import matplotlib.pyplot as plt
-import numpy as np
-
-num_args = len(sys.argv)
-if num_args < 3 or num_args > 4:
-    print("Usage:\n\t python getImage.py <binary_file> <image_index> <total_images = 9145 (default)>\n")
-    sys.exit(1)
-
-N = 9145
-if (num_args == 4):
-    N = int(sys.argv[3])
-
-H = 240
-W = 300
-
-filename = str(sys.argv[1])
-index = int(sys.argv[2])
-
-images = np.fromfile(filename, dtype=np.float32)
-images = images.reshape(N, H, W)
-
-plt.imshow(images[index], cmap = 'gray')
-plt.savefig("output.jpeg")
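
For instance, to render the tenth image (zero-based index 9) of one of the dumps produced above into output.jpeg, a hypothetical invocation would be:

    python getImage.py caltech-gaussian-sharpen-motionblur-emboss.bin 9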
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/pipeline/golden_output/.gitkeep b/llvm/projects/hpvm-tensor-rt/model_params/pipeline/golden_output/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_b.bin
deleted file mode 100644
index d79b2e67bad72c6ebdb19c941b37756b263ef0a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_w.bin
deleted file mode 100644
index c2e01b7b4d964a09664708a0f026eaf99dd8eeeb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_b.bin
deleted file mode 100644
index e331a624e72a31c0f5e55e41f86b0955a99cb1c8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_w.bin
deleted file mode 100644
index f051ac0a8a447e657e1ec77253715874a9f6ebe7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_b.bin
deleted file mode 100644
index bb5b9987af846d46bbfe488685c374f88dd7e4c5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_w.bin
deleted file mode 100644
index 4f38a0c9bee132b24fd928e979c0ba7dcd9a5c6e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_b.bin
deleted file mode 100644
index 37b3bb9a90e6cd116b6cd1851b95ace05ec1fd01..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_w.bin
deleted file mode 100644
index 2875c98f7b66a603c812ce56b36480588e1ea485..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_b.bin
deleted file mode 100644
index e9cdf2cb415724b7d5fcf06d5c7235d24aa8d149..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_w.bin
deleted file mode 100644
index e61a38f2341168a0c46d9ef85e8bbe28ca045cd4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_b.bin
deleted file mode 100644
index d5e57d9a30432bb4eba7306b5b7e4d9fd84949df..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_w.bin
deleted file mode 100644
index ec41201b44a58c65b0d6cc5953c35c9dadc49aee..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_15_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_b.bin
deleted file mode 100644
index 03bf31b1a8fa00dafd0ebe9cdceecc74805fac36..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_w.bin
deleted file mode 100644
index 737edac073bdb9aa82eafe27de2097a8321cb1c8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_16_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_b.bin
deleted file mode 100644
index 03bf31b1a8fa00dafd0ebe9cdceecc74805fac36..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_w.bin
deleted file mode 100644
index 10f8a9cdc5eed1f92ea041de47fd4265dd2b5173..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_17_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_b.bin
deleted file mode 100644
index e586a691a74fec2f49fee7f8cfa9943d9e68e64f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_w.bin
deleted file mode 100644
index 2b3ab07739e81e4d84193d34572f6d91b91312d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_18_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_b.bin
deleted file mode 100644
index d6a21cf1303fd3a67ce3225abd6252c8bc651026..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_w.bin
deleted file mode 100644
index 6e6bdd816692555976defa55b8b87f2b60bbda04..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_19_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_b.bin
deleted file mode 100644
index 3b00ed7564353deafc3305193155b50f94fa3dca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_w.bin
deleted file mode 100644
index cf6863997495166fc49724e7773b3322d798f971..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_b.bin
deleted file mode 100644
index 676ee4a73d8c84d64128c9b96a7426888a835d9a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_w.bin
deleted file mode 100644
index d90a00be5b22375c575184607af88395abce6f8f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_20_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_b.bin
deleted file mode 100644
index 9d648c9fe372dbeae9ceb9fafb4123ed0801d6f4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_w.bin
deleted file mode 100644
index 03a4c7fd5fc4c143bf95e9ad2cbaff70528726f2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_21_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_b.bin
deleted file mode 100644
index 8991c48aec188c94fdd8b474fa12842e488fed40..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_w.bin
deleted file mode 100644
index a3ca19b974a113755116160162e8d365fc8aaa5e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_b.bin
deleted file mode 100644
index 7d292e0979713d666305e8c151f118c068216d22..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_w.bin
deleted file mode 100644
index 958fbd75554273a2ca78b41e6fecf11a978ffe80..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_b.bin
deleted file mode 100644
index dbc78bff40e5bd9d660513f3be716f46ab091a16..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_w.bin
deleted file mode 100644
index fe149cd36a7923d5e14fc36f48f3edd9d2d63865..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_b.bin
deleted file mode 100644
index e7ff5b9111028db115205ded4e89cf0d93d35dd5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_w.bin
deleted file mode 100644
index 4fbc221209a1f3c76e56b223f935638120bce27b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_b.bin
deleted file mode 100644
index a8beb7f9a62af9acf3545689071c2f876eaabba0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_w.bin
deleted file mode 100644
index 2109938c064275ba444c2fa0dcaf822127abc8f5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_b.bin
deleted file mode 100644
index a2e9602e0c5368c1373b7c7d9e742c87cf9177ec..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_w.bin
deleted file mode 100644
index db47b1d51693b2dde778f1bc9b5829569005ae1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_b.bin
deleted file mode 100644
index 8de983148a39656d75b8479b89953e39b0d5fd49..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_w.bin
deleted file mode 100644
index 55b55e73b4e93359c60910cd1bc23287f9c12da3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_b.bin
deleted file mode 100644
index d79b2e67bad72c6ebdb19c941b37756b263ef0a4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_w.bin
deleted file mode 100644
index 37775413a908edd1ffd45bddfd45bdb3376f0b6f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_b.bin
deleted file mode 100644
index 17086790f385a651becfd73546736dc7fb309d6a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_w.bin
deleted file mode 100644
index ebd46d3c3cca156b55c65d547850141794f65245..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_b.bin
deleted file mode 100644
index 9a869c3271460631f381582c3c792c9501f56e38..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_w.bin
deleted file mode 100644
index c9db3bf629d1b58e942d18bdbac94483aca8fe35..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/input.bin
deleted file mode 100644
index 60f85339bdb32c1e4b4ac6df83b6bf0c9385e9bc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/src.cc
deleted file mode 100644
index 2ba7ab5fd6ed7d8edac77ad1507c395b04b5328c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_2/src.cc
+++ /dev/null
@@ -1,193 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("resnet18_cifar10/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-void* var_4 = tensorRelu(var_3); 
-void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-void* var_8 = tensorRelu(var_7); 
-void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-void* var_12 = tensorAdd(var_4, var_11); 
-void* var_13 = tensorRelu(var_12); 
-void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-void* var_17 = tensorRelu(var_16); 
-void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-void* var_21 = tensorAdd(var_13, var_20); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-void* var_30 = tensorAdd(var_22, var_29); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-void* var_35 = tensorRelu(var_34); 
-void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-void* var_42 = tensorAdd(var_41, var_38); 
-void* var_43 = tensorRelu(var_42); 
-void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-void* var_47 = tensorRelu(var_46); 
-void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-void* var_51 = tensorAdd(var_43, var_50); 
-void* var_52 = tensorRelu(var_51); 
-void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-void* var_60 = tensorAdd(var_52, var_59); 
-void* var_61 = tensorRelu(var_60); 
-void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-void* var_65 = tensorRelu(var_64); 
-void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-void* var_72 = tensorAdd(var_71, var_68); 
-void* var_73 = tensorRelu(var_72); 
-void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-void* var_77 = tensorRelu(var_76); 
-void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-void* var_81 = tensorAdd(var_73, var_80); 
-void* var_82 = tensorRelu(var_81); 
-void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-void* var_86 = tensorRelu(var_85); 
-void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-void* var_90 = tensorAdd(var_82, var_89); 
-void* var_91 = tensorRelu(var_90); 
-void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-void* var_95 = tensorAdd(var_94, dense_1_b); 
-void* var_96 = tensorSoftmax(var_95); 
-
-computeAccuracy2(labels,10000,var_96); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
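
Every residual unit in the generated code above repeats the same conv/bias/ReLU dataflow (e.g. var_2 through var_13); a minimal sketch of that pattern, with a conv callable standing in for tensorConvolution and plain addition for tensorAdd:

    import numpy as np

    def relu(x):
        return np.maximum(x, 0)

    def basic_block(x, w1, b1, w2, b2, conv):
        # conv -> bias add -> ReLU, then conv -> bias add,
        # then the identity skip connection and a final ReLU
        y = relu(conv(x, w1) + b1)
        y = conv(y, w2) + b2
        return relu(x + y)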
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_b.bin
deleted file mode 100644
index 92ff703707bd456815de1f90693148e611ba23ee..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_w.bin
deleted file mode 100644
index cc902484c129133a5a5d7a67ca74e089c6152b2a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_b.bin
deleted file mode 100644
index 33772ce97b4f61d4f61c62d8d17f72626eba97c0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_w.bin
deleted file mode 100644
index be55d74574e2b7d67ef6f1a2a949c72a72f22100..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_b.bin
deleted file mode 100644
index 514e8257230a47da74b502267fa458ba7961ae84..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_w.bin
deleted file mode 100644
index b3e23e4fb99f7da390808d6f27c5c215e73587e1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_b.bin
deleted file mode 100644
index 7b8aca0bfe4d25214f316f060b608d90db3a551c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_w.bin
deleted file mode 100644
index a6f45312f39af8e9875adc62dae7ff7ead5f3f4c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_b.bin
deleted file mode 100644
index cd778ed9562272eb456ae6a7e46c26114465e9f4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_w.bin
deleted file mode 100644
index 6fd515c9c2c50145ebadf0a74189319fa67ba1b9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_b.bin
deleted file mode 100644
index 274686c5b933c435f4f2c20cac93dcd94c5ae329..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_w.bin
deleted file mode 100644
index f79d7516c7051f416c76382aeebc327ff18297d0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_15_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_b.bin
deleted file mode 100644
index 7b02161d6d5deb34ad5643c5f2ab400bf9103f98..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_w.bin
deleted file mode 100644
index 2b05dc9587e787b33f7b2b28adf211fe6e0d550e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_16_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_b.bin
deleted file mode 100644
index 8053d6222bf43b733db86856c9a6d505290ae552..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_w.bin
deleted file mode 100644
index 94d17e441e49204a75092a220fe2893f9bc095a0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_17_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_b.bin
deleted file mode 100644
index c24f45c1e13fd1525ef69493a8234a347c56ca1a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_w.bin
deleted file mode 100644
index 11ab8d8a59b1dc3253a672ad6407be394c692786..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_18_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_b.bin
deleted file mode 100644
index 59f2fd4e1c2c0893b28c33fb7decdee4d55be77f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_w.bin
deleted file mode 100644
index 27051b4ac907bd256f726c06ab3be15585d23de8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_19_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_b.bin
deleted file mode 100644
index 94029393b8058f9907a056256140cfce97527de1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_w.bin
deleted file mode 100644
index 107d0fd98a289adafcdbab0c749a721bf734df0e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_b.bin
deleted file mode 100644
index d6b670f77de70be9d5dccaa88a7f32fdd47eee03..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_w.bin
deleted file mode 100644
index 42e064bed81978adb60976c04e5dcc3ceaf87658..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_20_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_b.bin
deleted file mode 100644
index 71614e32a2fca812887bd23a37498a96551ca9c4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_w.bin
deleted file mode 100644
index 27c7ae3646b2ccdc9e4f0398bf4272657ffb4c51..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_21_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_b.bin
deleted file mode 100644
index 409537b3e63f395ca644dfdaee22b19528fba39a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_w.bin
deleted file mode 100644
index fa7678545554acb92e868c09ef9a5d5f0afff437..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_b.bin
deleted file mode 100644
index e2b3c56191fe2d3c056d69aaee7927b5aa2f49e6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_w.bin
deleted file mode 100644
index 8c0c752bebf4b1bc9dee7e4ded125c692782ea38..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_b.bin
deleted file mode 100644
index d287b098b9f25726d70589490e423fdf19c2f81b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_w.bin
deleted file mode 100644
index 4ba5804be8759a711388cf3b34a7c1eba1d2fa4d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_b.bin
deleted file mode 100644
index 0c0f6692aa695181fa0e82622dd3b21db1848552..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_w.bin
deleted file mode 100644
index f6b21918e43c68436d7022f9f1540eb97cb9868b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_b.bin
deleted file mode 100644
index 87d8b53d30170e658f55246977a2c69e5f6d92cd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_w.bin
deleted file mode 100644
index a45e06f87cbf0e58f8fab5fd3e896ced402a9c57..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_b.bin
deleted file mode 100644
index 2e939d3aee3eebebc3582952d69a28e95fc44874..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_w.bin
deleted file mode 100644
index 6b1ac67a9ada6a6c66c372eed3f9ca32ec047d2a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_b.bin
deleted file mode 100644
index 55fd1e87916460c20fe1bd0e0d7a2025e570fb5f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_w.bin
deleted file mode 100644
index c62b2a92999c2ad67d49685e29c7cba24bc5fdbd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_b.bin
deleted file mode 100644
index 92ff703707bd456815de1f90693148e611ba23ee..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_w.bin
deleted file mode 100644
index f4daf712a2873b532badb1e1a17240fa77e252eb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_b.bin
deleted file mode 100644
index a5e3994371709b14277dcbaa746b4d42c307f169..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_w.bin
deleted file mode 100644
index 04f224de07564c5985c35b118a33a8b60c47fc15..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_b.bin
deleted file mode 100644
index 9a869c3271460631f381582c3c792c9501f56e38..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_w.bin
deleted file mode 100644
index c9db3bf629d1b58e942d18bdbac94483aca8fe35..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/input.bin
deleted file mode 100644
index 60f85339bdb32c1e4b4ac6df83b6bf0c9385e9bc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_calib.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_calib.bin
deleted file mode 100644
index 6444ccf6de9b7c064ce205fc5ca376fc268550b3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_calib.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_train_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_train_labels.bin
deleted file mode 100644
index 67843c5d2166e8c87cb22041249f1b378773e5ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/resnet18_train_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/src.cc
deleted file mode 100644
index 2ba7ab5fd6ed7d8edac77ad1507c395b04b5328c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_3/src.cc
+++ /dev/null
@@ -1,193 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("resnet18_cifar10/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-void* var_4 = tensorRelu(var_3); 
-void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-void* var_8 = tensorRelu(var_7); 
-void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-void* var_12 = tensorAdd(var_4, var_11); 
-void* var_13 = tensorRelu(var_12); 
-void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-void* var_17 = tensorRelu(var_16); 
-void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-void* var_21 = tensorAdd(var_13, var_20); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-void* var_30 = tensorAdd(var_22, var_29); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-void* var_35 = tensorRelu(var_34); 
-void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-void* var_42 = tensorAdd(var_41, var_38); 
-void* var_43 = tensorRelu(var_42); 
-void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-void* var_47 = tensorRelu(var_46); 
-void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-void* var_51 = tensorAdd(var_43, var_50); 
-void* var_52 = tensorRelu(var_51); 
-void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-void* var_60 = tensorAdd(var_52, var_59); 
-void* var_61 = tensorRelu(var_60); 
-void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-void* var_65 = tensorRelu(var_64); 
-void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-void* var_72 = tensorAdd(var_71, var_68); 
-void* var_73 = tensorRelu(var_72); 
-void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-void* var_77 = tensorRelu(var_76); 
-void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-void* var_81 = tensorAdd(var_73, var_80); 
-void* var_82 = tensorRelu(var_81); 
-void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-void* var_86 = tensorRelu(var_85); 
-void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-void* var_90 = tensorAdd(var_82, var_89); 
-void* var_91 = tensorRelu(var_90); 
-void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-void* var_95 = tensorAdd(var_94, dense_1_b); 
-void* var_96 = tensorSoftmax(var_95); 
-
-computeAccuracy2(labels,10000,var_96); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_b.bin
deleted file mode 100644
index dbdb7c37b64e8e16a6ea093d4d6e4f1af8258610..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_w.bin
deleted file mode 100644
index 9c188f6f249781fc6aca77709f7dba4231072765..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_b.bin
deleted file mode 100644
index 97b8ebe8bdc9d4e5ca74e6126eca1e14f5bfd3db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_w.bin
deleted file mode 100644
index 10fa1ebe08c6712f341d2d270492643fda102014..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_b.bin
deleted file mode 100644
index 8e713d6176cac919a63d2ea8ed9fa47fe800db05..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_w.bin
deleted file mode 100644
index 465c40b612851a9bd398ce5d24c0b25982db8a67..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_b.bin
deleted file mode 100644
index 84fdd64bf1381f65f3ddd65913b5c0f469f49daa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_w.bin
deleted file mode 100644
index 6191b0390b3ffbb7424fda7653b609022adbaa2e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_b.bin
deleted file mode 100644
index e2f04a5e2ffbce542423a97f67ddc3dc8276abbd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_w.bin
deleted file mode 100644
index d1b63e862f10a2e7c9db4e34db50869a6b7de295..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_14_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_b.bin
deleted file mode 100644
index 81abea9153be67ff7867c2939ca6afca9e7a7c2e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_w.bin
deleted file mode 100644
index 2bfe843e43ab3a638cf74ec4b5890f7402f16368..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_15_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_b.bin
deleted file mode 100644
index a674bac343d6cb3b1396a01ec1c23c6406eb0687..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_w.bin
deleted file mode 100644
index c4c291ec63cf370ee1a159b7c776cb095eb419aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_16_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_b.bin
deleted file mode 100644
index 56a0d3dba4cf3ee1098107daab9e9a5ef89a8f7e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_w.bin
deleted file mode 100644
index 94460a2a9c014acf06b561d10e8e342df6416c79..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_17_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_b.bin
deleted file mode 100644
index 812f0bc63a04eee434f32e44b08c535a6fff0e1f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_w.bin
deleted file mode 100644
index 22d04d201690e95c09dd39b2e62395356a91a610..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_18_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_b.bin
deleted file mode 100644
index 264ebf9db7b81d0c58bf42367964001796bb7007..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_w.bin
deleted file mode 100644
index 26692e859c2ff6a6f466c5280eb0a9ec13e7ba3d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_19_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_b.bin
deleted file mode 100644
index d8fa20287f246359d687a7e1c8a9add5b0d482b1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_w.bin
deleted file mode 100644
index 2b28d11d7087f8482ca6b85614bac9c5d89e86ba..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_b.bin
deleted file mode 100644
index c1e9487d5c2dbf612e1d405894b4be57ac86931a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_w.bin
deleted file mode 100644
index 19bb82b16e0c852ef7de1133581570de4e5163d4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_20_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_b.bin
deleted file mode 100644
index f8e4509e626f855566c430560c33501c741d82cd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_w.bin
deleted file mode 100644
index 2f27e0df6f33191954029c4586e856cd8521af10..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_21_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_b.bin
deleted file mode 100644
index 66f62621d56c6d4713819980a3fd8511c6a01dcf..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_w.bin
deleted file mode 100644
index 9f03691a7faef92691bc8761bd89cd62bda7b516..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_b.bin
deleted file mode 100644
index 7e9cb4e6ec2336a8230354a1ff90f45bf2f29d66..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_w.bin
deleted file mode 100644
index 5eaf436d696ce8a262beb897bf87732d19a61f54..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_b.bin
deleted file mode 100644
index 9c979c9d8997c551b5121a77587a6b014c8dbd2e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_w.bin
deleted file mode 100644
index d5ef72fda73527d7c94a087005a2ebeac75e96ca..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_b.bin
deleted file mode 100644
index c116be8c273f8abd1f40a7b446bffcb14f74df24..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_w.bin
deleted file mode 100644
index 687e514cbff23bf4c4ee12f5068c5ee081cf7fd0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_b.bin
deleted file mode 100644
index a00bc8b1ffc5ca67d5aa827e9d74b34123e2f483..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_w.bin
deleted file mode 100644
index a5db3c15d9175bb5473981b4c0bbe58bce141835..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_b.bin
deleted file mode 100644
index 201aea2f09a83bf5ad755304aa63342a036b659d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_w.bin
deleted file mode 100644
index 870a5719406d392aaaaef8ce38a126ed03cafd9a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_b.bin
deleted file mode 100644
index 0fb9f0830c7ffe96255354de4d9e5b0378dfb6a6..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_w.bin
deleted file mode 100644
index 41495fea2fc429d0d9bf3f8afc9ff9879d35618d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_b.bin
deleted file mode 100644
index dbdb7c37b64e8e16a6ea093d4d6e4f1af8258610..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_w.bin
deleted file mode 100644
index c5618b47d9bd4417cd36d48d9288759a8f955477..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_b.bin
deleted file mode 100644
index 5df6e08172e8ad71bf72131d8e7afa8389c77514..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_w.bin
deleted file mode 100644
index eb7245db1d3e84e47b1a2eeb3e35126cfdf2764c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/input.bin
deleted file mode 100644
index 7426b7222304d2a59fb7619761969b70bcc5d2c4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels.bin
deleted file mode 100644
index d386ac9dde8313657aac92e874fe25a36277bd86..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels32.bin
deleted file mode 100644
index c501aed519a8c7d79c189e34735deb2c09f24d75..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/promise_src.cc
deleted file mode 100644
index f6e7e32153a5e89a68798f809cf4166285b408ea..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/promise_src.cc
+++ /dev/null
@@ -1,162 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-
-std::string dir_prefix = std::string("resnet18_cifar10_promise/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = ConvLayer_PROMISE(input, -0.5500815, 0.60786617, conv2d_1_w, -1.0248864, 1.2929907, conv2d_1_b, -0.36291853, 0.2533059, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.9356618, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 2.9356618, conv2d_2_w, -0.69884616, 0.71849966, conv2d_2_b, -0.2781147, 0.45571187, 1, 1, 1, 1, -1, 0, 1, 0.0, 4.0425158, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.0425158, conv2d_3_w, -0.59568167, 0.7714691, conv2d_3_b, -0.8602873, 0.19743633, 1, 1, 1, 1, -1, 0, -1, -10.203314, 9.055045, 9); 
-void* var_3 = tensorAdd(var_0, var_2); 
-void* var_4 = tensorRelu(var_3); 
-void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 9.734258, conv2d_4_w, -0.41976976, 0.43748936, conv2d_4_b, -0.7021962, 0.3033103, 1, 1, 1, 1, -1, 0, 1, 0.0, 7.0053105, 9); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 7.0053105, conv2d_5_w, -0.46757826, 0.4635873, conv2d_5_b, -0.20662616, 0.1778044, 1, 1, 1, 1, -1, 0, -1, -4.8778534, 6.7311873, 9); 
-void* var_7 = tensorAdd(var_4, var_6); 
-void* var_8 = tensorRelu(var_7); 
-void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 10.858562, conv2d_6_w, -0.64404047, 0.45383143, conv2d_6_b, -0.819547, 0.38550296, 1, 1, 1, 1, -1, 0, 1, 0.0, 8.843336, 9); 
-void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 8.843336, conv2d_7_w, -0.41986948, 0.33654243, conv2d_7_b, -0.3563013, 0.22371122, 1, 1, 1, 1, -1, 0, -1, -10.204111, 5.4952374, 9); 
-void* var_11 = tensorAdd(var_8, var_10); 
-void* var_12 = tensorRelu(var_11); 
-void* var_13 = ConvLayer_PROMISE(var_12, 0.0, 11.359337, conv2d_8_w, -0.4805263, 0.50655717, conv2d_8_b, -0.296758, 0.7742441, 1, 1, 2, 2, -1, 0, 1, 0.0, 19.303282, 9); 
-void* var_14 = ConvLayer_PROMISE(var_13, 0.0, 19.303282, conv2d_9_w, -0.52083415, 0.45517674, conv2d_9_b, -0.20242067, 0.8236838, 1, 1, 1, 1, -1, 0, -1, -24.404322, 24.37361, 9); 
-void* var_15 = ConvLayer_PROMISE(var_12, 0.0, 11.359337, conv2d_10_w, -0.5338656, 1.3395424, conv2d_10_b, -0.20242067, 0.8236838, 0, 0, 2, 2, -1, 0, -1, -6.497986, 12.8968935, 9); 
-void* var_16 = tensorAdd(var_15, var_14); 
-void* var_17 = tensorRelu(var_16); 
-void* var_18 = ConvLayer_PROMISE(var_17, 0.0, 29.462997, conv2d_11_w, -0.34429058, 0.43629733, conv2d_11_b, -1.0744808, 0.056708273, 1, 1, 1, 1, -1, 0, 1, 0.0, 24.329395, 9); 
-void* var_19 = ConvLayer_PROMISE(var_18, 0.0, 24.329395, conv2d_12_w, -0.30342352, 0.39493486, conv2d_12_b, -0.44630566, 0.6492069, 1, 1, 1, 1, -1, 0, -1, -9.780206, 20.320444, 9); 
-void* var_20 = tensorAdd(var_17, var_19); 
-void* var_21 = tensorRelu(var_20); 
-void* var_22 = ConvLayer_PROMISE(var_21, 0.0, 29.647404, conv2d_13_w, -0.38351893, 0.45775774, conv2d_13_b, -1.4733055, -0.014426912, 1, 1, 1, 1, -1, 0, 1, 0.0, 25.600573, 9); 
-void* var_23 = ConvLayer_PROMISE(var_22, 0.0, 25.600573, conv2d_14_w, -0.25695276, 0.45372736, conv2d_14_b, -0.5259744, 0.26591402, 1, 1, 1, 1, -1, 0, -1, -10.179335, 27.598986, 9); 
-void* var_24 = tensorAdd(var_21, var_23); 
-void* var_25 = tensorRelu(var_24); 
-void* var_26 = ConvLayer_PROMISE(var_25, 0.0, 42.450073, conv2d_15_w, -0.55299705, 0.5443531, conv2d_15_b, -0.71790683, 1.2730768, 1, 1, 2, 2, -1, 0, 1, 0.0, 48.943645, 9); 
-void* var_27 = ConvLayer_PROMISE(var_26, 0.0, 48.943645, conv2d_16_w, -0.4203967, 0.48641303, conv2d_16_b, -0.90653443, 1.3546854, 1, 1, 1, 1, -1, 0, -1, -75.016396, 112.3873, 9); 
-void* var_28 = ConvLayer_PROMISE(var_25, 0.0, 42.450073, conv2d_17_w, -0.4365755, 0.84913826, conv2d_17_b, -0.90653443, 1.3546851, 0, 0, 2, 2, -1, 0, -1, -13.93596, 24.614315, 9); 
-void* var_29 = tensorAdd(var_28, var_27); 
-void* var_30 = tensorRelu(var_29); 
-void* var_31 = ConvLayer_PROMISE(var_30, 0.0, 126.758545, conv2d_18_w, -0.38657624, 0.5228989, conv2d_18_b, -1.2083547, 0.76361173, 1, 1, 1, 1, -1, 0, 1, 0.0, 130.02768, 9); 
-void* var_32 = ConvLayer_PROMISE(var_31, 0.0, 130.02768, conv2d_19_w, -0.40857902, 0.575035, conv2d_19_b, -1.8731614, 1.0960501, 1, 1, 1, 1, -1, 0, -1, -97.91927, 130.89008, 9); 
-void* var_33 = tensorAdd(var_30, var_32); 
-void* var_34 = tensorRelu(var_33); 
-void* var_35 = ConvLayer_PROMISE(var_34, 0.0, 220.4094, conv2d_20_w, -0.33079496, 0.5893278, conv2d_20_b, -1.0234511, 1.0016295, 1, 1, 1, 1, -1, 0, 1, 0.0, 268.19254, 9); 
-void* var_36 = ConvLayer_PROMISE(var_35, 0.0, 268.19254, conv2d_21_w, -0.27897888, 0.38280907, conv2d_21_b, -2.2086356, 1.0066502, 1, 1, 1, 1, -1, 0, -1, -235.08034, 264.33008, 9); 
-void* var_37 = tensorAdd(var_34, var_36); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,1,8,8,0,0,8,8); 
-void* var_40 = FCLayer_PROMISE(var_39, 0.0, 39.821262, dense_1_w, -1.5092047, 1.0279838, dense_1_b, -0.49379802, 0.61032647, -1, -84.49565, 60.312202, 9); 
-void* var_41 = tensorSoftmax(var_40); 
-
-computeAccuracy2(labels,10000,var_41); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/src.cc
deleted file mode 100644
index e82c54651aaa8b8df2ab34b65490aa79b730c327..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/resnet18_cifar10_promise/src.cc
+++ /dev/null
@@ -1,193 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("resnet18_cifar10_promise/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,16,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,16,16,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,16,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,32,16,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,32,16,1,1); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_14_w_path =  dir_prefix + std::string("conv2d_14_w.bin"); 
-void* conv2d_14_w =  readTrainedWeights(conv2d_14_w_path.c_str(), 0,32,32,3,3); 
-std::string conv2d_14_b_path =  dir_prefix + std::string("conv2d_14_b.bin"); 
-void* conv2d_14_b =  readTrainedWeights(conv2d_14_b_path.c_str(), 0,1,32,1,1); 
-std::string conv2d_15_w_path =  dir_prefix + std::string("conv2d_15_w.bin"); 
-void* conv2d_15_w =  readTrainedWeights(conv2d_15_w_path.c_str(), 0,64,32,3,3); 
-std::string conv2d_15_b_path =  dir_prefix + std::string("conv2d_15_b.bin"); 
-void* conv2d_15_b =  readTrainedWeights(conv2d_15_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_17_w_path =  dir_prefix + std::string("conv2d_17_w.bin"); 
-void* conv2d_17_w =  readTrainedWeights(conv2d_17_w_path.c_str(), 0,64,32,1,1); 
-std::string conv2d_17_b_path =  dir_prefix + std::string("conv2d_17_b.bin"); 
-void* conv2d_17_b =  readTrainedWeights(conv2d_17_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_16_w_path =  dir_prefix + std::string("conv2d_16_w.bin"); 
-void* conv2d_16_w =  readTrainedWeights(conv2d_16_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_16_b_path =  dir_prefix + std::string("conv2d_16_b.bin"); 
-void* conv2d_16_b =  readTrainedWeights(conv2d_16_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_18_w_path =  dir_prefix + std::string("conv2d_18_w.bin"); 
-void* conv2d_18_w =  readTrainedWeights(conv2d_18_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_18_b_path =  dir_prefix + std::string("conv2d_18_b.bin"); 
-void* conv2d_18_b =  readTrainedWeights(conv2d_18_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_19_w_path =  dir_prefix + std::string("conv2d_19_w.bin"); 
-void* conv2d_19_w =  readTrainedWeights(conv2d_19_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_19_b_path =  dir_prefix + std::string("conv2d_19_b.bin"); 
-void* conv2d_19_b =  readTrainedWeights(conv2d_19_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_20_w_path =  dir_prefix + std::string("conv2d_20_w.bin"); 
-void* conv2d_20_w =  readTrainedWeights(conv2d_20_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_20_b_path =  dir_prefix + std::string("conv2d_20_b.bin"); 
-void* conv2d_20_b =  readTrainedWeights(conv2d_20_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_21_w_path =  dir_prefix + std::string("conv2d_21_w.bin"); 
-void* conv2d_21_w =  readTrainedWeights(conv2d_21_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_21_b_path =  dir_prefix + std::string("conv2d_21_b.bin"); 
-void* conv2d_21_b =  readTrainedWeights(conv2d_21_b_path.c_str(), 0,1,64,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,64,10); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_2 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_3 = tensorAdd(var_2, conv2d_1_b); 
-void* var_4 = tensorRelu(var_3); 
-void* var_6 = tensorConvolution(var_4, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_7 = tensorAdd(var_6, conv2d_2_b); 
-void* var_8 = tensorRelu(var_7); 
-void* var_10 = tensorConvolution(var_8, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_11 = tensorAdd(var_10, conv2d_3_b); 
-void* var_12 = tensorAdd(var_4, var_11); 
-void* var_13 = tensorRelu(var_12); 
-void* var_15 = tensorConvolution(var_13, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_16 = tensorAdd(var_15, conv2d_4_b); 
-void* var_17 = tensorRelu(var_16); 
-void* var_19 = tensorConvolution(var_17, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_20 = tensorAdd(var_19, conv2d_5_b); 
-void* var_21 = tensorAdd(var_13, var_20); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_6_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_28 = tensorConvolution(var_26, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_7_b); 
-void* var_30 = tensorAdd(var_22, var_29); 
-void* var_31 = tensorRelu(var_30); 
-void* var_33 = tensorConvolution(var_31, conv2d_8_w, 1, 1, 2, 2, 1, 0); 
-void* var_34 = tensorAdd(var_33, conv2d_8_b); 
-void* var_35 = tensorRelu(var_34); 
-void* var_37 = tensorConvolution(var_35, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_38 = tensorAdd(var_37, conv2d_9_b); 
-void* var_40 = tensorConvolution(var_31, conv2d_10_w, 0, 0, 2, 2, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_10_b); 
-void* var_42 = tensorAdd(var_41, var_38); 
-void* var_43 = tensorRelu(var_42); 
-void* var_45 = tensorConvolution(var_43, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_46 = tensorAdd(var_45, conv2d_11_b); 
-void* var_47 = tensorRelu(var_46); 
-void* var_49 = tensorConvolution(var_47, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_50 = tensorAdd(var_49, conv2d_12_b); 
-void* var_51 = tensorAdd(var_43, var_50); 
-void* var_52 = tensorRelu(var_51); 
-void* var_54 = tensorConvolution(var_52, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_55 = tensorAdd(var_54, conv2d_13_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorConvolution(var_56, conv2d_14_w, 1, 1, 1, 1, 1, 0); 
-void* var_59 = tensorAdd(var_58, conv2d_14_b); 
-void* var_60 = tensorAdd(var_52, var_59); 
-void* var_61 = tensorRelu(var_60); 
-void* var_63 = tensorConvolution(var_61, conv2d_15_w, 1, 1, 2, 2, 1, 0); 
-void* var_64 = tensorAdd(var_63, conv2d_15_b); 
-void* var_65 = tensorRelu(var_64); 
-void* var_67 = tensorConvolution(var_65, conv2d_16_w, 1, 1, 1, 1, 1, 0); 
-void* var_68 = tensorAdd(var_67, conv2d_16_b); 
-void* var_70 = tensorConvolution(var_61, conv2d_17_w, 0, 0, 2, 2, 1, 0); 
-void* var_71 = tensorAdd(var_70, conv2d_17_b); 
-void* var_72 = tensorAdd(var_71, var_68); 
-void* var_73 = tensorRelu(var_72); 
-void* var_75 = tensorConvolution(var_73, conv2d_18_w, 1, 1, 1, 1, 1, 0); 
-void* var_76 = tensorAdd(var_75, conv2d_18_b); 
-void* var_77 = tensorRelu(var_76); 
-void* var_79 = tensorConvolution(var_77, conv2d_19_w, 1, 1, 1, 1, 1, 0); 
-void* var_80 = tensorAdd(var_79, conv2d_19_b); 
-void* var_81 = tensorAdd(var_73, var_80); 
-void* var_82 = tensorRelu(var_81); 
-void* var_84 = tensorConvolution(var_82, conv2d_20_w, 1, 1, 1, 1, 1, 0); 
-void* var_85 = tensorAdd(var_84, conv2d_20_b); 
-void* var_86 = tensorRelu(var_85); 
-void* var_88 = tensorConvolution(var_86, conv2d_21_w, 1, 1, 1, 1, 1, 0); 
-void* var_89 = tensorAdd(var_88, conv2d_21_b); 
-void* var_90 = tensorAdd(var_82, var_89); 
-void* var_91 = tensorRelu(var_90); 
-void* var_92 = tensorPooling(var_91,1,8,8,0,0,8,8); 
-void* var_94 = tensorGemmGPU(var_92, dense_1_w); 
-void* var_95 = tensorAdd(var_94, dense_1_b); 
-void* var_96 = tensorSoftmax(var_95); 
-
-computeAccuracy2(labels,10000,var_96); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/approxhpvm_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/approxhpvm_src.cc
deleted file mode 100644
index 8084e3723a6141ac0e99729b8455bcc529ac7a0f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/approxhpvm_src.cc
+++ /dev/null
@@ -1,982 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/stat.h> 
-#include <cstring> 
-#include <visc.h> 
-#include <tensorTypes.h> 
-#include <tensorUtils.h> 
-
-void var_0_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_1_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_2_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_3_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_4_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_5_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_6_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_7_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_8_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_9_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_10_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_11_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_12_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_13_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_14_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_15_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_16_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_17_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_18_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_19_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_20_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_21_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_22_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_23_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_24_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_25_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_26_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_27_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_28_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_29_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_30_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_31_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_32_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_33_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_34_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_35_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_36_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_37_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_38_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_39_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_40_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_convolution(t1, t2, 1, 1, 1, 1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_41_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_42_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_43_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_pool_max(t1, 2, 2, 0, 0, 2, 2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_44_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_45_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_46_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_relu(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_47_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_mul(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_48_node(void* t1, size_t bytes_t1, void* t2, size_t bytes_t2) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(2, t1, t2, 0); 
-
-  void *r = __visc__tensor_add(t1, t2); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void var_49_node(void* t1, size_t bytes_t1) { 
-  __visc__hint(visc::CUDNN_TARGET); 
-  __visc__attributes(1, t1, 0); 
-
-  void* r = __visc__tensor_softmax(t1); 
-  __visc__return(2, r, (size_t) 0); 
-}
-
-void root(void* input, size_t input_bytes, 
-	  void* conv2d_1_w, size_t conv2d_1_w_bytes, 
-	  void* conv2d_1_b, size_t conv2d_1_b_bytes, 
-	  void* conv2d_2_w, size_t conv2d_2_w_bytes, 
-	  void* conv2d_2_b, size_t conv2d_2_b_bytes, 
-	  void* conv2d_3_w, size_t conv2d_3_w_bytes, 
-	  void* conv2d_3_b, size_t conv2d_3_b_bytes, 
-	  void* conv2d_4_w, size_t conv2d_4_w_bytes, 
-	  void* conv2d_4_b, size_t conv2d_4_b_bytes, 
-	  void* conv2d_5_w, size_t conv2d_5_w_bytes, 
-	  void* conv2d_5_b, size_t conv2d_5_b_bytes, 
-	  void* conv2d_6_w, size_t conv2d_6_w_bytes, 
-	  void* conv2d_6_b, size_t conv2d_6_b_bytes, 
-	  void* conv2d_7_w, size_t conv2d_7_w_bytes, 
-	  void* conv2d_7_b, size_t conv2d_7_b_bytes, 
-	  void* conv2d_8_w, size_t conv2d_8_w_bytes, 
-	  void* conv2d_8_b, size_t conv2d_8_b_bytes, 
-	  void* conv2d_9_w, size_t conv2d_9_w_bytes, 
-	  void* conv2d_9_b, size_t conv2d_9_b_bytes, 
-	  void* conv2d_10_w, size_t conv2d_10_w_bytes, 
-	  void* conv2d_10_b, size_t conv2d_10_b_bytes, 
-	  void* conv2d_11_w, size_t conv2d_11_w_bytes, 
-	  void* conv2d_11_b, size_t conv2d_11_b_bytes, 
-	  void* conv2d_12_w, size_t conv2d_12_w_bytes, 
-	  void* conv2d_12_b, size_t conv2d_12_b_bytes, 
-	  void* conv2d_13_w, size_t conv2d_13_w_bytes, 
-	  void* conv2d_13_b, size_t conv2d_13_b_bytes, 
-	  void* dense_1_w, size_t dense_1_w_bytes, 
-	  void* dense_1_b, size_t dense_1_b_bytes, 
-	  void* dense_2_w, size_t dense_2_w_bytes, 
-	  void* dense_2_b, size_t dense_2_b_bytes){ 
-
-
-  __visc__hint(visc::CPU_TARGET); 
-  __visc__attributes(31, input, conv2d_1_w, conv2d_1_b, conv2d_2_w, conv2d_2_b, conv2d_3_w, conv2d_3_b, conv2d_4_w, conv2d_4_b, conv2d_5_w, conv2d_5_b, conv2d_6_w, conv2d_6_b, conv2d_7_w, conv2d_7_b, conv2d_8_w, conv2d_8_b, conv2d_9_w, conv2d_9_b, conv2d_10_w, conv2d_10_b, conv2d_11_w, conv2d_11_b, conv2d_12_w, conv2d_12_b, conv2d_13_w, conv2d_13_b, dense_1_w, dense_1_b, dense_2_w, dense_2_b, 0); 
-
-
-  void* var_0 = __visc__createNodeND(0, var_0_node); 
-
-  __visc__bindIn(var_0, 0, 0, 0); 
-  __visc__bindIn(var_0, 1, 1, 0); 
-  __visc__bindIn(var_0, 2, 2, 0); 
-  __visc__bindIn(var_0, 3, 3, 0); 
-
-  void* var_1 = __visc__createNodeND(0, var_1_node); 
-
-  __visc__edge(var_0, var_1, 1, 0, 0, 0); 
-  __visc__edge(var_0, var_1, 1, 1, 1, 0); 
-  __visc__bindIn(var_1, 4, 2, 0); 
-  __visc__bindIn(var_1, 5, 3, 0); 
-
-  void* var_2 = __visc__createNodeND(0, var_2_node); 
-
-  __visc__edge(var_1, var_2, 1, 0, 0, 0); 
-  __visc__edge(var_1, var_2, 1, 1, 1, 0); 
-
-  void* var_3 = __visc__createNodeND(0, var_3_node); 
-
-  __visc__edge(var_2, var_3, 1, 0, 0, 0); 
-  __visc__edge(var_2, var_3, 1, 1, 1, 0); 
-  __visc__bindIn(var_3, 6, 2, 0); 
-  __visc__bindIn(var_3, 7, 3, 0); 
-
-  void* var_4 = __visc__createNodeND(0, var_4_node); 
-
-  __visc__edge(var_3, var_4, 1, 0, 0, 0); 
-  __visc__edge(var_3, var_4, 1, 1, 1, 0); 
-  __visc__bindIn(var_4, 8, 2, 0); 
-  __visc__bindIn(var_4, 9, 3, 0); 
-
-  void* var_5 = __visc__createNodeND(0, var_5_node); 
-
-  __visc__edge(var_4, var_5, 1, 0, 0, 0); 
-  __visc__edge(var_4, var_5, 1, 1, 1, 0); 
-
-  void* var_6 = __visc__createNodeND(0, var_6_node); 
-
-  __visc__edge(var_5, var_6, 1, 0, 0, 0); 
-  __visc__edge(var_5, var_6, 1, 1, 1, 0); 
-
-  void* var_7 = __visc__createNodeND(0, var_7_node); 
-
-  __visc__edge(var_6, var_7, 1, 0, 0, 0); 
-  __visc__edge(var_6, var_7, 1, 1, 1, 0); 
-  __visc__bindIn(var_7, 10, 2, 0); 
-  __visc__bindIn(var_7, 11, 3, 0); 
-
-  void* var_8 = __visc__createNodeND(0, var_8_node); 
-
-  __visc__edge(var_7, var_8, 1, 0, 0, 0); 
-  __visc__edge(var_7, var_8, 1, 1, 1, 0); 
-  __visc__bindIn(var_8, 12, 2, 0); 
-  __visc__bindIn(var_8, 13, 3, 0); 
-
-  void* var_9 = __visc__createNodeND(0, var_9_node); 
-
-  __visc__edge(var_8, var_9, 1, 0, 0, 0); 
-  __visc__edge(var_8, var_9, 1, 1, 1, 0); 
-
-  void* var_10 = __visc__createNodeND(0, var_10_node); 
-
-  __visc__edge(var_9, var_10, 1, 0, 0, 0); 
-  __visc__edge(var_9, var_10, 1, 1, 1, 0); 
-  __visc__bindIn(var_10, 14, 2, 0); 
-  __visc__bindIn(var_10, 15, 3, 0); 
-
-  void* var_11 = __visc__createNodeND(0, var_11_node); 
-
-  __visc__edge(var_10, var_11, 1, 0, 0, 0); 
-  __visc__edge(var_10, var_11, 1, 1, 1, 0); 
-  __visc__bindIn(var_11, 16, 2, 0); 
-  __visc__bindIn(var_11, 17, 3, 0); 
-
-  void* var_12 = __visc__createNodeND(0, var_12_node); 
-
-  __visc__edge(var_11, var_12, 1, 0, 0, 0); 
-  __visc__edge(var_11, var_12, 1, 1, 1, 0); 
-
-  void* var_13 = __visc__createNodeND(0, var_13_node); 
-
-  __visc__edge(var_12, var_13, 1, 0, 0, 0); 
-  __visc__edge(var_12, var_13, 1, 1, 1, 0); 
-
-  void* var_14 = __visc__createNodeND(0, var_14_node); 
-
-  __visc__edge(var_13, var_14, 1, 0, 0, 0); 
-  __visc__edge(var_13, var_14, 1, 1, 1, 0); 
-  __visc__bindIn(var_14, 18, 2, 0); 
-  __visc__bindIn(var_14, 19, 3, 0); 
-
-  void* var_15 = __visc__createNodeND(0, var_15_node); 
-
-  __visc__edge(var_14, var_15, 1, 0, 0, 0); 
-  __visc__edge(var_14, var_15, 1, 1, 1, 0); 
-  __visc__bindIn(var_15, 20, 2, 0); 
-  __visc__bindIn(var_15, 21, 3, 0); 
-
-  void* var_16 = __visc__createNodeND(0, var_16_node); 
-
-  __visc__edge(var_15, var_16, 1, 0, 0, 0); 
-  __visc__edge(var_15, var_16, 1, 1, 1, 0); 
-
-  void* var_17 = __visc__createNodeND(0, var_17_node); 
-
-  __visc__edge(var_16, var_17, 1, 0, 0, 0); 
-  __visc__edge(var_16, var_17, 1, 1, 1, 0); 
-  __visc__bindIn(var_17, 22, 2, 0); 
-  __visc__bindIn(var_17, 23, 3, 0); 
-
-  void* var_18 = __visc__createNodeND(0, var_18_node); 
-
-  __visc__edge(var_17, var_18, 1, 0, 0, 0); 
-  __visc__edge(var_17, var_18, 1, 1, 1, 0); 
-  __visc__bindIn(var_18, 24, 2, 0); 
-  __visc__bindIn(var_18, 25, 3, 0); 
-
-  void* var_19 = __visc__createNodeND(0, var_19_node); 
-
-  __visc__edge(var_18, var_19, 1, 0, 0, 0); 
-  __visc__edge(var_18, var_19, 1, 1, 1, 0); 
-
-  void* var_20 = __visc__createNodeND(0, var_20_node); 
-
-  __visc__edge(var_19, var_20, 1, 0, 0, 0); 
-  __visc__edge(var_19, var_20, 1, 1, 1, 0); 
-  __visc__bindIn(var_20, 26, 2, 0); 
-  __visc__bindIn(var_20, 27, 3, 0); 
-
-  void* var_21 = __visc__createNodeND(0, var_21_node); 
-
-  __visc__edge(var_20, var_21, 1, 0, 0, 0); 
-  __visc__edge(var_20, var_21, 1, 1, 1, 0); 
-  __visc__bindIn(var_21, 28, 2, 0); 
-  __visc__bindIn(var_21, 29, 3, 0); 
-
-  void* var_22 = __visc__createNodeND(0, var_22_node); 
-
-  __visc__edge(var_21, var_22, 1, 0, 0, 0); 
-  __visc__edge(var_21, var_22, 1, 1, 1, 0); 
-
-  void* var_23 = __visc__createNodeND(0, var_23_node); 
-
-  __visc__edge(var_22, var_23, 1, 0, 0, 0); 
-  __visc__edge(var_22, var_23, 1, 1, 1, 0); 
-
-  void* var_24 = __visc__createNodeND(0, var_24_node); 
-
-  __visc__edge(var_23, var_24, 1, 0, 0, 0); 
-  __visc__edge(var_23, var_24, 1, 1, 1, 0); 
-  __visc__bindIn(var_24, 30, 2, 0); 
-  __visc__bindIn(var_24, 31, 3, 0); 
-
-  void* var_25 = __visc__createNodeND(0, var_25_node); 
-
-  __visc__edge(var_24, var_25, 1, 0, 0, 0); 
-  __visc__edge(var_24, var_25, 1, 1, 1, 0); 
-  __visc__bindIn(var_25, 32, 2, 0); 
-  __visc__bindIn(var_25, 33, 3, 0); 
-
-  void* var_26 = __visc__createNodeND(0, var_26_node); 
-
-  __visc__edge(var_25, var_26, 1, 0, 0, 0); 
-  __visc__edge(var_25, var_26, 1, 1, 1, 0); 
-
-  void* var_27 = __visc__createNodeND(0, var_27_node); 
-
-  __visc__edge(var_26, var_27, 1, 0, 0, 0); 
-  __visc__edge(var_26, var_27, 1, 1, 1, 0); 
-  __visc__bindIn(var_27, 34, 2, 0); 
-  __visc__bindIn(var_27, 35, 3, 0); 
-
-  void* var_28 = __visc__createNodeND(0, var_28_node); 
-
-  __visc__edge(var_27, var_28, 1, 0, 0, 0); 
-  __visc__edge(var_27, var_28, 1, 1, 1, 0); 
-  __visc__bindIn(var_28, 36, 2, 0); 
-  __visc__bindIn(var_28, 37, 3, 0); 
-
-  void* var_29 = __visc__createNodeND(0, var_29_node); 
-
-  __visc__edge(var_28, var_29, 1, 0, 0, 0); 
-  __visc__edge(var_28, var_29, 1, 1, 1, 0); 
-
-  void* var_30 = __visc__createNodeND(0, var_30_node); 
-
-  __visc__edge(var_29, var_30, 1, 0, 0, 0); 
-  __visc__edge(var_29, var_30, 1, 1, 1, 0); 
-  __visc__bindIn(var_30, 38, 2, 0); 
-  __visc__bindIn(var_30, 39, 3, 0); 
-
-  void* var_31 = __visc__createNodeND(0, var_31_node); 
-
-  __visc__edge(var_30, var_31, 1, 0, 0, 0); 
-  __visc__edge(var_30, var_31, 1, 1, 1, 0); 
-  __visc__bindIn(var_31, 40, 2, 0); 
-  __visc__bindIn(var_31, 41, 3, 0); 
-
-  void* var_32 = __visc__createNodeND(0, var_32_node); 
-
-  __visc__edge(var_31, var_32, 1, 0, 0, 0); 
-  __visc__edge(var_31, var_32, 1, 1, 1, 0); 
-
-  void* var_33 = __visc__createNodeND(0, var_33_node); 
-
-  __visc__edge(var_32, var_33, 1, 0, 0, 0); 
-  __visc__edge(var_32, var_33, 1, 1, 1, 0); 
-
-  void* var_34 = __visc__createNodeND(0, var_34_node); 
-
-  __visc__edge(var_33, var_34, 1, 0, 0, 0); 
-  __visc__edge(var_33, var_34, 1, 1, 1, 0); 
-  __visc__bindIn(var_34, 42, 2, 0); 
-  __visc__bindIn(var_34, 43, 3, 0); 
-
-  void* var_35 = __visc__createNodeND(0, var_35_node); 
-
-  __visc__edge(var_34, var_35, 1, 0, 0, 0); 
-  __visc__edge(var_34, var_35, 1, 1, 1, 0); 
-  __visc__bindIn(var_35, 44, 2, 0); 
-  __visc__bindIn(var_35, 45, 3, 0); 
-
-  void* var_36 = __visc__createNodeND(0, var_36_node); 
-
-  __visc__edge(var_35, var_36, 1, 0, 0, 0); 
-  __visc__edge(var_35, var_36, 1, 1, 1, 0); 
-
-  void* var_37 = __visc__createNodeND(0, var_37_node); 
-
-  __visc__edge(var_36, var_37, 1, 0, 0, 0); 
-  __visc__edge(var_36, var_37, 1, 1, 1, 0); 
-  __visc__bindIn(var_37, 46, 2, 0); 
-  __visc__bindIn(var_37, 47, 3, 0); 
-
-  void* var_38 = __visc__createNodeND(0, var_38_node); 
-
-  __visc__edge(var_37, var_38, 1, 0, 0, 0); 
-  __visc__edge(var_37, var_38, 1, 1, 1, 0); 
-  __visc__bindIn(var_38, 48, 2, 0); 
-  __visc__bindIn(var_38, 49, 3, 0); 
-
-  void* var_39 = __visc__createNodeND(0, var_39_node); 
-
-  __visc__edge(var_38, var_39, 1, 0, 0, 0); 
-  __visc__edge(var_38, var_39, 1, 1, 1, 0); 
-
-  void* var_40 = __visc__createNodeND(0, var_40_node); 
-
-  __visc__edge(var_39, var_40, 1, 0, 0, 0); 
-  __visc__edge(var_39, var_40, 1, 1, 1, 0); 
-  __visc__bindIn(var_40, 50, 2, 0); 
-  __visc__bindIn(var_40, 51, 3, 0); 
-
-  void* var_41 = __visc__createNodeND(0, var_41_node); 
-
-  __visc__edge(var_40, var_41, 1, 0, 0, 0); 
-  __visc__edge(var_40, var_41, 1, 1, 1, 0); 
-  __visc__bindIn(var_41, 52, 2, 0); 
-  __visc__bindIn(var_41, 53, 3, 0); 
-
-  void* var_42 = __visc__createNodeND(0, var_42_node); 
-
-  __visc__edge(var_41, var_42, 1, 0, 0, 0); 
-  __visc__edge(var_41, var_42, 1, 1, 1, 0); 
-
-  void* var_43 = __visc__createNodeND(0, var_43_node); 
-
-  __visc__edge(var_42, var_43, 1, 0, 0, 0); 
-  __visc__edge(var_42, var_43, 1, 1, 1, 0); 
-
-  void* var_44 = __visc__createNodeND(0, var_44_node); 
-
-  __visc__edge(var_43, var_44, 1, 0, 0, 0); 
-  __visc__edge(var_43, var_44, 1, 1, 1, 0); 
-  __visc__bindIn(var_44, 54, 2, 0); 
-  __visc__bindIn(var_44, 55, 3, 0); 
-
-  void* var_45 = __visc__createNodeND(0, var_45_node); 
-
-  __visc__edge(var_44, var_45, 1, 0, 0, 0); 
-  __visc__edge(var_44, var_45, 1, 1, 1, 0); 
-  __visc__bindIn(var_45, 56, 2, 0); 
-  __visc__bindIn(var_45, 57, 3, 0); 
-
-  void* var_46 = __visc__createNodeND(0, var_46_node); 
-
-  __visc__edge(var_45, var_46, 1, 0, 0, 0); 
-  __visc__edge(var_45, var_46, 1, 1, 1, 0); 
-
-  void* var_47 = __visc__createNodeND(0, var_47_node); 
-
-  __visc__edge(var_46, var_47, 1, 0, 0, 0); 
-  __visc__edge(var_46, var_47, 1, 1, 1, 0); 
-  __visc__bindIn(var_47, 58, 2, 0); 
-  __visc__bindIn(var_47, 59, 3, 0); 
-
-  void* var_48 = __visc__createNodeND(0, var_48_node); 
-
-  __visc__edge(var_47, var_48, 1, 0, 0, 0); 
-  __visc__edge(var_47, var_48, 1, 1, 1, 0); 
-  __visc__bindIn(var_48, 60, 2, 0); 
-  __visc__bindIn(var_48, 61, 3, 0); 
-
-  void* var_49 = __visc__createNodeND(0, var_49_node); 
-
-  __visc__edge(var_48, var_49, 1, 0, 0, 0); 
-  __visc__edge(var_48, var_49, 1, 1, 1, 0); 
-
-  __visc__bindOut(var_49, 0, 0, 0); 
-  __visc__bindOut(var_49, 1, 1, 0); 
-
-}
-
-struct ret_t {
-  void* tensor; 
-  size_t bytes; 
-}; 
-
-typedef struct __attribute__((__packed__)) {
-  void* input; 
-  size_t input_bytes; 
-  void* conv2d_1_w; 
-  size_t conv2d_1_w_bytes; 
-  void* conv2d_1_b; 
-  size_t conv2d_1_b_bytes; 
-  void* conv2d_2_w; 
-  size_t conv2d_2_w_bytes; 
-  void* conv2d_2_b; 
-  size_t conv2d_2_b_bytes; 
-  void* conv2d_3_w; 
-  size_t conv2d_3_w_bytes; 
-  void* conv2d_3_b; 
-  size_t conv2d_3_b_bytes; 
-  void* conv2d_4_w; 
-  size_t conv2d_4_w_bytes; 
-  void* conv2d_4_b; 
-  size_t conv2d_4_b_bytes; 
-  void* conv2d_5_w; 
-  size_t conv2d_5_w_bytes; 
-  void* conv2d_5_b; 
-  size_t conv2d_5_b_bytes; 
-  void* conv2d_6_w; 
-  size_t conv2d_6_w_bytes; 
-  void* conv2d_6_b; 
-  size_t conv2d_6_b_bytes; 
-  void* conv2d_7_w; 
-  size_t conv2d_7_w_bytes; 
-  void* conv2d_7_b; 
-  size_t conv2d_7_b_bytes; 
-  void* conv2d_8_w; 
-  size_t conv2d_8_w_bytes; 
-  void* conv2d_8_b; 
-  size_t conv2d_8_b_bytes; 
-  void* conv2d_9_w; 
-  size_t conv2d_9_w_bytes; 
-  void* conv2d_9_b; 
-  size_t conv2d_9_b_bytes; 
-  void* conv2d_10_w; 
-  size_t conv2d_10_w_bytes; 
-  void* conv2d_10_b; 
-  size_t conv2d_10_b_bytes; 
-  void* conv2d_11_w; 
-  size_t conv2d_11_w_bytes; 
-  void* conv2d_11_b; 
-  size_t conv2d_11_b_bytes; 
-  void* conv2d_12_w; 
-  size_t conv2d_12_w_bytes; 
-  void* conv2d_12_b; 
-  size_t conv2d_12_b_bytes; 
-  void* conv2d_13_w; 
-  size_t conv2d_13_w_bytes; 
-  void* conv2d_13_b; 
-  size_t conv2d_13_b_bytes; 
-  void* dense_1_w; 
-  size_t dense_1_w_bytes; 
-  void* dense_1_b; 
-  size_t dense_1_b_bytes; 
-  void* dense_2_w; 
-  size_t dense_2_w_bytes; 
-  void* dense_2_b; 
-  size_t dense_2_b_bytes; 
-
-  struct ret_t r; 
-}
-RootIn;
-
-int main(){ 
-
-std::string dir_prefix = std::string("vgg16_cifar100_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-
-__visc__init(); 
-RootIn* args = static_cast<RootIn*>(malloc(sizeof(RootIn))); 
-
-args->input = input; 
-args->input_bytes = 0; 
-args->conv2d_1_w = conv2d_1_w; 
-args->conv2d_1_w_bytes = 0; 
-args->conv2d_1_b = conv2d_1_b; 
-args->conv2d_1_b_bytes = 0; 
-args->conv2d_2_w = conv2d_2_w; 
-args->conv2d_2_w_bytes = 0; 
-args->conv2d_2_b = conv2d_2_b; 
-args->conv2d_2_b_bytes = 0; 
-args->conv2d_3_w = conv2d_3_w; 
-args->conv2d_3_w_bytes = 0; 
-args->conv2d_3_b = conv2d_3_b; 
-args->conv2d_3_b_bytes = 0; 
-args->conv2d_4_w = conv2d_4_w; 
-args->conv2d_4_w_bytes = 0; 
-args->conv2d_4_b = conv2d_4_b; 
-args->conv2d_4_b_bytes = 0; 
-args->conv2d_5_w = conv2d_5_w; 
-args->conv2d_5_w_bytes = 0; 
-args->conv2d_5_b = conv2d_5_b; 
-args->conv2d_5_b_bytes = 0; 
-args->conv2d_6_w = conv2d_6_w; 
-args->conv2d_6_w_bytes = 0; 
-args->conv2d_6_b = conv2d_6_b; 
-args->conv2d_6_b_bytes = 0; 
-args->conv2d_7_w = conv2d_7_w; 
-args->conv2d_7_w_bytes = 0; 
-args->conv2d_7_b = conv2d_7_b; 
-args->conv2d_7_b_bytes = 0; 
-args->conv2d_8_w = conv2d_8_w; 
-args->conv2d_8_w_bytes = 0; 
-args->conv2d_8_b = conv2d_8_b; 
-args->conv2d_8_b_bytes = 0; 
-args->conv2d_9_w = conv2d_9_w; 
-args->conv2d_9_w_bytes = 0; 
-args->conv2d_9_b = conv2d_9_b; 
-args->conv2d_9_b_bytes = 0; 
-args->conv2d_10_w = conv2d_10_w; 
-args->conv2d_10_w_bytes = 0; 
-args->conv2d_10_b = conv2d_10_b; 
-args->conv2d_10_b_bytes = 0; 
-args->conv2d_11_w = conv2d_11_w; 
-args->conv2d_11_w_bytes = 0; 
-args->conv2d_11_b = conv2d_11_b; 
-args->conv2d_11_b_bytes = 0; 
-args->conv2d_12_w = conv2d_12_w; 
-args->conv2d_12_w_bytes = 0; 
-args->conv2d_12_b = conv2d_12_b; 
-args->conv2d_12_b_bytes = 0; 
-args->conv2d_13_w = conv2d_13_w; 
-args->conv2d_13_w_bytes = 0; 
-args->conv2d_13_b = conv2d_13_b; 
-args->conv2d_13_b_bytes = 0; 
-args->dense_1_w = dense_1_w; 
-args->dense_1_w_bytes = 0; 
-args->dense_1_b = dense_1_b; 
-args->dense_1_b_bytes = 0; 
-args->dense_2_w = dense_2_w; 
-args->dense_2_w_bytes = 0; 
-args->dense_2_b = dense_2_b; 
-args->dense_2_b_bytes = 0; 
-
-void* dfg = __visc__launch(0, root, (void*) args); 
-
-__visc__wait(dfg); 
-
-void *result = static_cast<RootIn*>(args)->input; 
-hpvm_request_tensor(result, 0); 
-
-__visc__cleanup(); 
- computeAccuracy2(labels, 10000, result); 
-return 0; 
-
-} 
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_b.bin
deleted file mode 100644
index 9db02dbd02d278f3ff0ca57a29069c6aa97e5de0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_w.bin
deleted file mode 100644
index 7bded494d26ebe0ef8d5807bf0d72b93e996bd88..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_b.bin
deleted file mode 100644
index c504961705fa627a5d79745fb199dd3ba657176a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_w.bin
deleted file mode 100644
index a43074e10f04f0e1f84339f053fa9fa160afcfe0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_b.bin
deleted file mode 100644
index f355f4cd6814c73adc88d91610792d28afc55f26..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_w.bin
deleted file mode 100644
index e0a3ba8ec934f80a8e25079f6ba2df4a80ddb8dd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_b.bin
deleted file mode 100644
index 02b34402d6400d6ca28cb23e71149a47f003c6fe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_w.bin
deleted file mode 100644
index b9b2f0063091bd1cf4cce556947588e902acca1f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_b.bin
deleted file mode 100644
index a46ece4fa84149d5501331039ae48becebc94aa1..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_w.bin
deleted file mode 100644
index a479061bc1cc7876846b9602fdf4058b31e539c5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_b.bin
deleted file mode 100644
index 734a5cbf3ea3501205bdc024695afb0c716252d3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_w.bin
deleted file mode 100644
index c0d54721818458b65a9224583d51977afab9cdbd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_b.bin
deleted file mode 100644
index bbf5b5fc5ae9990cdb155111975a1c6a26801012..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_w.bin
deleted file mode 100644
index 98e8ac21b55eb35b133cde17db42e6b54e34c6b0..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_b.bin
deleted file mode 100644
index 8251640419305c37ef428b620ac6bea8c19b2462..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_w.bin
deleted file mode 100644
index a69f63de1d1f83768635a2d6e062e2773d3c4035..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_b.bin
deleted file mode 100644
index 188fded6c6130aa36748791dba917a55dc93b626..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_w.bin
deleted file mode 100644
index af70bc4bf9b92e25c7dfeb3033d23eedf9b50552..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_b.bin
deleted file mode 100644
index e2999812df22d44cf3e658d32986e6ccbabc9d4c..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_w.bin
deleted file mode 100644
index bc0f6bb98d293a7eca254d30b39b9962830e366b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_b.bin
deleted file mode 100644
index 6e13d00189e9d484ac13b6e6ddd0a8fb811d15e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_w.bin
deleted file mode 100644
index 52f165dac46686de7c74d0f13d58a7621eedde32..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_b.bin
deleted file mode 100644
index 522044eaf02e5cff51a74cddc769c13bf1d750b7..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_w.bin
deleted file mode 100644
index 2c11e701d36747b6c045d5eca522bdb89eb7d1f8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_b.bin
deleted file mode 100644
index 0569ef5fe88a2d24a43f73411e7509661233cdff..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_w.bin
deleted file mode 100644
index bd374d936bc8977756ff5968fd32fc7e116ad560..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_b.bin
deleted file mode 100644
index dc4f5cf56c3308245ca8ffc63f2bde019be15e95..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_w.bin
deleted file mode 100644
index af9de099702a966e0746d9f45bae02c4335eda70..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_b.bin
deleted file mode 100644
index e5d561e947d0b8b42ecb039bee0d53076490337a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_w.bin
deleted file mode 100644
index 351da0667b4391bd8cd3db0ce7bf4b8621931f0b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/input.bin
deleted file mode 100644
index 0492b77f00b91dbb0b321ec6c77c20737d388afa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels.bin
deleted file mode 100644
index 4d4041bc05c6037e7612f325104ae24da525c5be..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels32.bin
deleted file mode 100644
index 0e29aa05a086316e36429e20e1a13580e1b0c36a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layer_composition.txt b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layer_composition.txt
deleted file mode 100644
index 79818d6f010035c6e19f12881749f4d5b3d3c253..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layer_composition.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  
-conv  add  activation  pool  
-conv  add  activation  
-conv  add  activation  
-conv  add  activation  pool  
-dense  add  activation  
-dense  add  
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layers.txt b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layers.txt
deleted file mode 100644
index 7eaa520e4a2e451d5ccec5c8737dec8be8458369..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/layers.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Conv1,10000,3,32,32,64,3,3,3
-Conv2,10000,64,32,32,64,64,3,3
-Conv3,10000,64,16,16,128,64,3,3
-Conv4,10000,128,16,16,128,128,3,3
-Conv5,10000,128,8,8,256,128,3,3
-Conv6,10000,256,8,8,256,256,3,3
-Conv7,10000,256,8,8,256,256,3,3
-Conv8,10000,256,4,4,512,256,3,3
-Conv9,10000,512,4,4,512,512,3,3
-Conv10,10000,512,4,4,512,512,3,3
-Conv11,10000,512,2,2,512,512,3,3
-Conv12,10000,512,2,2,512,512,3,3
-Conv13,10000,512,2,2,512,512,3,3
-FC1,10000,512,512,512
-FC2,10000,512,512,100
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/promise_src.cc
deleted file mode 100644
index 0f28f2bfd69d9e8c4895e782bd02173eefcd0993..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/promise_src.cc
+++ /dev/null
@@ -1,138 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-int total_runs = 100; 
-for (int i = 0 ; i < total_runs; i++){ 
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-
-
-std::string dir_prefix = std::string("vgg16_cifar100_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 8.190712, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 8.190712, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 19.023172, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 19.023172, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 14.428051, 9); 
-void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 14.428051, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 23.065294, 9); 
-void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 23.065294, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 15.165984, 9); 
-void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 15.165984, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 13.664733, 9); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 13.664733, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 19.025272, 9); 
-void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 19.025272, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 6.727217, 9); 
-void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 6.727217, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.2003012, 9); 
-void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 3.2003012, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.321189, 9); 
-void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 4.321189, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.936297, 9); 
-void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 2.936297, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 4.87262, 9); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 4.87262, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 10.32133, 9); 
-void* var_13 = FCLayer_PROMISE(var_12, 0.0, 10.32133, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 13.91, 9); 
-void* var_14 = FCLayer_PROMISE(var_13, 0.0, 13.91, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -70.45701, 87.34367, 9); 
-void* var_15 = tensorSoftmax(var_14); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_15); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-}
-
-dumpExecutionAccuracies(); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
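Each of the generated runners deleted in this patch follows the same batched-evaluation skeleton: split the test set into fixed-size batches, run inference per batch, and average the per-batch accuracies. A minimal, self-contained sketch of just that skeleton follows (the sizes are illustrative, and runBatch is a hypothetical stand-in for the readInputBatch / layer-call / computeAccuracy2 sequence in the files above):

    #include <cstdio>

    // Hypothetical stand-in for one batch of inference; a real runner calls
    // readInputBatch(), the tensor layer functions, and computeAccuracy2().
    float runBatch(int start, int end) {
        (void)start; (void)end;
        return 90.0f; // placeholder per-batch accuracy, in percent
    }

    int main() {
        const int test_input_size = 5000; // illustrative, not from the repo
        const int batch_size = 1000;
        const int batch_count = test_input_size / batch_size;

        float final_accuracy = 0.0f;
        for (int i = 0; i < batch_count; i++) {
            int start = i * batch_size;     // first image index in this batch
            int end = (i + 1) * batch_size; // one past the last image index
            final_accuracy += runBatch(start, end);
        }
        final_accuracy /= batch_count;      // mean of per-batch accuracies
        printf("final accuracy: %f\n", final_accuracy);
        return 0;
    }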
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/src.cc
deleted file mode 100644
index bb792eaf71e851a5bf9791362aa09991dbc8ef68..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar100_front/src.cc
+++ /dev/null
@@ -1,164 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("vgg16_cifar100_test/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1); 
-
-
-
-startMemTracking(); 
-
-int test_input_size = 10000; 
-int batch_size = 10000; 
-int batch_count = test_input_size / batch_size; 
-float final_accuracy = 0.0; 
-
-for(int i = 0; i < batch_count; i++){ 
-
-int start = i * batch_size; 
-int end = (i + 1) * batch_size; 
-
-void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32); 
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-void* var_10 = tensorRelu(var_9); 
-void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorRelu(var_17); 
-void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-void* var_30 = tensorRelu(var_29); 
-void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-void* var_34 = tensorRelu(var_33); 
-void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-void* var_42 = tensorRelu(var_41); 
-void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-void* var_46 = tensorRelu(var_45); 
-void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-void* var_55 = tensorAdd(var_54, dense_1_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-void* var_59 = tensorAdd(var_58, dense_2_b); 
-void* var_60 = tensorSoftmax(var_59); 
-
-uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end); 
-
-float accuracy = computeAccuracy2(labels, batch_size, var_60); 
-final_accuracy += accuracy; 
-freeBatchMemory(); 
- 
-}
-
-final_accuracy = final_accuracy / batch_count; 
-dumpFinalAccuracy(final_accuracy); 
-
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
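The thirteen convolutional stages in the file above all repeat one four-call idiom: convolution, bias add, ReLU, and (after selected stages) 2x2 pooling. A hedged sketch that factors the idiom into a helper, assuming only the tensor_runtime.h declarations these files already include; convBlock itself is hypothetical and not part of the runtime API:

    // Sketch only: wraps the conv / bias / ReLU / pool idiom repeated above.
    void* convBlock(void* in, void* w, void* b, bool pool) {
        // 3x3 convolution with padding 1 and stride 1; the trailing (1, 0)
        // arguments are copied verbatim from the generated calls above.
        void* conv = tensorConvolution(in, w, 1, 1, 1, 1, 1, 0);
        void* biased = tensorAdd(conv, b);    // per-channel bias add
        void* activated = tensorRelu(biased); // elementwise ReLU
        // 2x2 pooling with stride 2, same arguments as the calls above
        return pool ? tensorPooling(activated, 0, 2, 2, 0, 0, 2, 2)
                    : activated;
    }

    // Usage, replacing the first two stages of the loop body:
    //   void* block1 = convBlock(input,  conv2d_1_w, conv2d_1_b, false);
    //   void* block2 = convBlock(block1, conv2d_2_w, conv2d_2_b, true);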
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_b.bin
deleted file mode 100644
index 5c1018acfb9bced92638c8ec85b8b2637c525f89..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_w.bin
deleted file mode 100644
index 9741a6c0ab1be107d7777e8ba3cefba61a58ea13..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_b.bin
deleted file mode 100644
index 2103a27bb8f5e9dc4799f435939c99b9e5d27c63..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_w.bin
deleted file mode 100644
index 3af50120b4e5febeb17d85e1ab6636ea4aff68ce..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_b.bin
deleted file mode 100644
index 86d09c41cef5bfbc2776901092db0227e7dfe318..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_w.bin
deleted file mode 100644
index 9307262357575ab5bc973cc99889637055e90841..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_b.bin
deleted file mode 100644
index 43788cb58d1d59d23fab0329c55cf1ec442ae1bd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_w.bin
deleted file mode 100644
index 1935a50645b8420a3c6767692f60ef57c685074a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_b.bin
deleted file mode 100644
index bd4b82494cab28510ff7fc55f565ece1bfb8f7fb..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_w.bin
deleted file mode 100644
index eb883ee6024240b14215ef0e9df3152fe35c99f3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_b.bin
deleted file mode 100644
index 93bb77192d28c7fcf6f92e0ffaab9637b763df28..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 bias data for conv2d_2_b (64 values) rendered as mojibake by git; unrecoverable as text, omitted; no trailing newline]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_w.bin
deleted file mode 100644
index 9f8047bf248a0c79004f352b1610d6c1dd13c019..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_b.bin
deleted file mode 100644
index d1c8245fc8ac5933fac0a89cfa5d6213e819410a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_w.bin
deleted file mode 100644
index 04e97f2c791ac1a52d93382f2ffe7235b6fa6806..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_b.bin
deleted file mode 100644
index cafc8138078aed273e04d971d32ef9200e6fcdad..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_w.bin
deleted file mode 100644
index 8e8962fc8523eea064719fdab39481c344f48428..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_b.bin
deleted file mode 100644
index 29d07bfcd1265fbc102a0e449da2de74732178ff..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_w.bin
deleted file mode 100644
index 32b5bf0f4567e5eeb4f542abcb67a2284cff3180..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_b.bin
deleted file mode 100644
index 5b2878eed5c2e9b95608ba3215fa25cdde175d18..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_w.bin
deleted file mode 100644
index 74924b2c639f16c230f167b47c1a9943fbc6a439..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_b.bin
deleted file mode 100644
index ab645f86d150c69fd1038d0ae687d8947b0baa71..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_w.bin
deleted file mode 100644
index 17309bf27f0fdcf360c42d106e6396670ce46b9e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_b.bin
deleted file mode 100644
index a95ca5bfec74e45598866ee6271818adb91115e5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_w.bin
deleted file mode 100644
index cd973995acba0f7f87668ccaba198b6b19b8f5f9..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_b.bin
deleted file mode 100644
index 2eeb9f1daac2d414b251a3696aea4d8847159b6b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_w.bin
deleted file mode 100644
index d34c4d2b9ab1f28e16392da758ef2cfc4ab56359..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_b.bin
deleted file mode 100644
index 894c13f1e61a964e0490904d48c7ee6aea3a82d4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_w.bin
deleted file mode 100644
index a45c830ca91e2bbf995d447edaa308d4c7261ea5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_b.bin
deleted file mode 100644
index dc02631634718d4b6716876538380cc0596a2ef6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 bias data for dense_2_b (10 values) rendered as mojibake by git; unrecoverable as text, omitted; no trailing newline]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_w.bin
deleted file mode 100644
index 05f5c5cc9c95e171f54836b452babbb48be7ab08..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/input.bin
deleted file mode 100644
index d500ac2cdaf78b2ab0e51eb9f8d89174247e52d5..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels.bin
deleted file mode 100644
index 4451911edf1afe4b0bc792730fbca3d4141d0c50..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels32.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels32.bin
deleted file mode 100644
index bf2090756b593142d0575f82bb52bdcd7bfee6d8..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/labels32.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/src.cc
deleted file mode 100644
index 44179ee9f39c9547270b45fc84249835350bee5f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/src.cc
+++ /dev/null
@@ -1,141 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("vgg16_cifar10/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-void* var_10 = tensorRelu(var_9); 
-void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorRelu(var_17); 
-void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-void* var_30 = tensorRelu(var_29); 
-void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-void* var_34 = tensorRelu(var_33); 
-void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-void* var_42 = tensorRelu(var_41); 
-void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-void* var_46 = tensorRelu(var_45); 
-void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-void* var_55 = tensorAdd(var_54, dense_1_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-void* var_59 = tensorAdd(var_58, dense_2_b); 
-void* var_60 = tensorSoftmax(var_59); 
-
-computeAccuracy2(labels,10000,var_60); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
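Unlike the batched runners, this variant evaluates all 10000 test images in a single pass, so it uses neither startMemTracking() nor freeBatchMemory(). For reference, here is a hedged, self-contained sketch of what a top-1 accuracy routine in the computeAccuracy2 mold presumably computes: argmax over each softmax row, compared against the byte labels. The actual utils.h implementation may differ in details such as logging or return scale.

    #include <cstdint>
    #include <cstdio>

    // Assumed behavior, not the repo's implementation: percent of rows whose
    // argmax matches the corresponding byte label.
    float top1AccuracyPercent(const uint8_t* labels, int n,
                              const float* probs, int num_classes) {
        int correct = 0;
        for (int i = 0; i < n; ++i) {
            const float* row = probs + static_cast<long>(i) * num_classes;
            int best = 0;
            for (int c = 1; c < num_classes; ++c)
                if (row[c] > row[best]) best = c;
            if (best == labels[i]) ++correct;
        }
        return 100.0f * correct / n;
    }

    int main() {
        // Toy check: 2 samples, 3 classes, one right and one wrong.
        uint8_t labels[2] = {0, 2};
        float probs[6] = {0.7f, 0.2f, 0.1f,   // argmax 0 == label 0: correct
                          0.5f, 0.4f, 0.1f};  // argmax 0 != label 2: wrong
        printf("%.1f%%\n", top1AccuracyPercent(labels, 2, probs, 3)); // 50.0%
        return 0;
    }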
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_cifar_calib.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_cifar_calib.bin
deleted file mode 100644
index 43bc1e5b985604c5a17fe67d2db4fec82e12042d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_cifar_calib.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_train_labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_train_labels.bin
deleted file mode 100644
index 9be730fd6f397987a6948a8d9196c7e156675d1b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_2/vgg16_train_labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_b.bin
deleted file mode 100644
index 3237726ac0c7d5ec28ebdfb6a797fc58a045eb1f..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_w.bin
deleted file mode 100644
index 88899f03befed6c0d4e558856581ebb431c314e3..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_10_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_b.bin
deleted file mode 100644
index 53a798786c32ccbe5a9b5a0b07be099fb3eff7bd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_w.bin
deleted file mode 100644
index ab9ac3fbeb148e1e5d7d3113823c85d020e04d01..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_11_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_b.bin
deleted file mode 100644
index a5ff1a8af10f961ee863771fb5195eedf3cec074..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_w.bin
deleted file mode 100644
index 1e7d532a55d79c6a241dc32505db0d2262bd730e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_12_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_b.bin
deleted file mode 100644
index 313d97ba32b9f09ae149ee6ba21eaafd17c59e9d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_w.bin
deleted file mode 100644
index 1f20f0387277ef57d5bf1844f7ce290a222fde2b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_13_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_b.bin
deleted file mode 100644
index 362fc9c4d288a2ea91f35c17555d538255402b45..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 bias data for conv2d_1_b (64 values) rendered as mojibake by git; unrecoverable as text, omitted; no trailing newline]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_w.bin
deleted file mode 100644
index f4eb5cb83d095dda38776074e62b697a75af8332..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_b.bin
deleted file mode 100644
index 36f65a5ca90d01a101b556b740ff101dd9546e27..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_w.bin
deleted file mode 100644
index 5fc166c0b4434dc35ef10c9bf7b384a33e80e59a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_b.bin
deleted file mode 100644
index fa6587f7a9ea3c1b081887ebc9d43eeebc5c150a..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_w.bin
deleted file mode 100644
index d9167a3c0987224015310eae98aa3adc9238c939..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_3_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_b.bin
deleted file mode 100644
index bc84c0b4581db9e731fa53acce502ffed3545101..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_w.bin
deleted file mode 100644
index 1d925d06083b623c0a8cf6427bbf59d67491b68e..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_4_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_b.bin
deleted file mode 100644
index 553fd648565463a7c9fe3a7c48d92f6d02b5da4b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_w.bin
deleted file mode 100644
index c7fcae11be3f48d10f674f6a6f9e13b47df937fe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_5_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_b.bin
deleted file mode 100644
index 600a3cc1d8f0e4e180b4388793a05b5f0dbed9aa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_w.bin
deleted file mode 100644
index 05746dc0b683bd7e888fd5a916dd25394c0904cc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_6_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_b.bin
deleted file mode 100644
index 0106fee89685bb112596c75a88ab2fd81f8f0d3d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_w.bin
deleted file mode 100644
index 8b1fd0dcf60298e0d7a230749fcf3fb3481360d2..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_7_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_b.bin
deleted file mode 100644
index 8debd467e4d7243faa809c92be0080326edfda0b..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_w.bin
deleted file mode 100644
index e78a4ec4fdcf179ad518307fd23bed73dfe4ed28..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_8_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_b.bin
deleted file mode 100644
index 9f67d65429cced68475bfc2520b6fce4ac837802..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_w.bin
deleted file mode 100644
index 698d737ccef902924c6d22ad3c2f15a7fbf588db..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/conv2d_9_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_b.bin
deleted file mode 100644
index 5fa2218731e61d7723c2e44ee2250a04b954d6bc..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_b.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_w.bin
deleted file mode 100644
index 5487379b881c8acdf60a6890d26612d9d2554f58..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_1_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_b.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_b.bin
deleted file mode 100644
index 026e08646b605bfd004ddc9fb394c03d14e4d00a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_b.bin
+++ /dev/null
@@ -1 +0,0 @@
-[raw float32 bias data for dense_2_b (10 values) rendered as mojibake by git; unrecoverable as text, omitted; no trailing newline]
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_w.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_w.bin
deleted file mode 100644
index 2ded2b61f6c80d1e9271ec8c2a0cdb2aad4f02fa..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/dense_2_w.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/input.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/input.bin
deleted file mode 100644
index d64c33474c400a77024b417127bdc675a61da941..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/input.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/labels.bin b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/labels.bin
deleted file mode 100644
index 7172750913a297f331af9ba88bce0d3e49968d47..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/labels.bin and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/promise_src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/promise_src.cc
deleted file mode 100644
index 193e378bfc3730f48ccdf9ee3c826db14bb6d7ea..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/promise_src.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../../tensor_runtime/include/tensor_runtime.h" 
-#include "../../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-
-std::string dir_prefix = std::string("vgg16_cifar10_promise/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = ConvLayer_PROMISE(input, -1.8816367, 2.0934217, conv2d_1_w, -0.5277183, 0.47715914, conv2d_1_b, -0.7505816, 0.23600718, 1, 1, 1, 1, -1, 0, 1, 0.0, 6.4866476, 9); 
-void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 6.4866476, conv2d_2_w, -0.26202837, 0.29998416, conv2d_2_b, -0.66326994, 0.8624978, 1, 1, 1, 1, 0, 2, 1, 0.0, 11.21298, 9); 
-void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 11.21298, conv2d_3_w, -0.15608788, 0.14187561, conv2d_3_b, -0.3363146, 0.6194402, 1, 1, 1, 1, -1, 0, 1, 0.0, 6.1078873, 9); 
-void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 6.1078873, conv2d_4_w, -0.081584536, 0.120576195, conv2d_4_b, -0.49956822, 0.615466, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.005309, 9); 
-void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.005309, conv2d_5_w, -0.081922, 0.09836473, conv2d_5_b, -0.15489745, 0.24373364, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.5644715, 9); 
-void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 1.5644715, conv2d_6_w, -0.056836933, 0.06501525, conv2d_6_b, -0.069405295, 0.0862945, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7939606, 9); 
-void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 0.7939606, conv2d_7_w, -0.05773365, 0.07874803, conv2d_7_b, -0.05009718, 0.055672053, 1, 1, 1, 1, 0, 2, 1, 0.0, 1.1549584, 9); 
-void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 1.1549584, conv2d_8_w, -0.036825273, 0.058849376, conv2d_8_b, -0.055235144, 0.024307447, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6689139, 9); 
-void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.6689139, conv2d_9_w, -0.016022889, 0.022971416, conv2d_9_b, -0.046823673, 0.027896304, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.92137647, 9); 
-void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.92137647, conv2d_10_w, -0.027309904, 0.041355837, conv2d_10_b, -0.038741723, 0.029062608, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.9661603, 9); 
-void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 5.9661603, conv2d_11_w, -0.03903461, 0.05086205, conv2d_11_b, -0.045523863, 0.055019163, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.780327, 9); 
-void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 2.780327, conv2d_12_w, -0.01921058, 0.03577844, conv2d_12_b, -0.05337079, 0.09478004, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.7779117, 9); 
-void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 3.7779117, conv2d_13_w, -0.022871122, 0.025569547, conv2d_13_b, -0.15490761, 0.15087973, 1, 1, 1, 1, 0, 2, 1, 0.0, 6.358653, 9); 
-void* var_13 = FCLayer_PROMISE(var_12, 0.0, 6.358653, dense_1_w, -0.04849465, 0.048118282, dense_1_b, -0.1256221, 0.39936453, 1, 0.0, 8.22251, 9); 
-void* var_14 = FCLayer_PROMISE(var_13, 0.0, 8.22251, dense_2_w, -0.33108112, 0.31025946, dense_2_b, -1.0712193, 1.3323823, -1, -41.86109, 65.92535, 9); 
-void* var_15 = tensorSoftmax(var_14); 
-
-computeAccuracy2(labels,10000,var_15); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
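The PROMISE variant above replaces each conv / add / relu / pool chain with a single fused ConvLayer_PROMISE call that carries value ranges for the input, weights, bias, and output. The argument roles below are an interpretation inferred from the generated calls, not a documented API; the snippet is an annotated copy of the first call in promise_src.cc and is not standalone code.

    // Annotated copy of the first fused call; comments are an inferred
    // reading of the positional arguments, not documented semantics.
    void* var_0 = ConvLayer_PROMISE(
        input, -1.8816367, 2.0934217,       // input tensor and its [min, max] range
        conv2d_1_w, -0.5277183, 0.47715914, // weights and their range
        conv2d_1_b, -0.7505816, 0.23600718, // bias and its range
        1, 1, 1, 1,                         // padding and strides, as in tensorConvolution
        -1, 0,                              // pooling: -1/0 = none; 0/2 elsewhere = 2x2 pool
        1,                                  // activation flag: 1 = ReLU; -1 on the final FC layer
        0.0, 6.4866476,                     // expected output range
        9);                                 // approximation knob; every call in these files uses 9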
diff --git a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/src.cc b/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/src.cc
deleted file mode 100644
index 515b7331815f445ee7e2d5e50dbe4dff76b6caae..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/model_params/vgg16_cifar10_promise/src.cc
+++ /dev/null
@@ -1,141 +0,0 @@
-
-#include <stdio.h> 
-#include <stdlib.h> 
-#include <unistd.h> 
-#include <fcntl.h> 
-#include <sys/types.h> 
-#include <sys/stat.h> 
-#include <string.h> 
-#include "../../tensor_runtime/include/tensor_runtime.h" 
-#include "../include/utils.h" 
-
-int main(){ 
-
-llvm_hpvm_initTensorRt(0); 
-
-
-std::string dir_prefix = std::string("vgg16_cifar10_promise/"); 
-std::string input_path =  dir_prefix + std::string("input.bin"); 
-void* input = readTrainedWeights(input_path.c_str(), 0,10000,3,32,32); 
-std::string labels_path =  dir_prefix + std::string("labels.bin"); 
-uint8_t* labels = readLabels(labels_path.c_str(),10000); 
-std::string conv2d_1_w_path =  dir_prefix + std::string("conv2d_1_w.bin"); 
-void* conv2d_1_w =  readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3); 
-std::string conv2d_1_b_path =  dir_prefix + std::string("conv2d_1_b.bin"); 
-void* conv2d_1_b =  readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_2_w_path =  dir_prefix + std::string("conv2d_2_w.bin"); 
-void* conv2d_2_w =  readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3); 
-std::string conv2d_2_b_path =  dir_prefix + std::string("conv2d_2_b.bin"); 
-void* conv2d_2_b =  readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1); 
-std::string conv2d_3_w_path =  dir_prefix + std::string("conv2d_3_w.bin"); 
-void* conv2d_3_w =  readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3); 
-std::string conv2d_3_b_path =  dir_prefix + std::string("conv2d_3_b.bin"); 
-void* conv2d_3_b =  readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_4_w_path =  dir_prefix + std::string("conv2d_4_w.bin"); 
-void* conv2d_4_w =  readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3); 
-std::string conv2d_4_b_path =  dir_prefix + std::string("conv2d_4_b.bin"); 
-void* conv2d_4_b =  readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1); 
-std::string conv2d_5_w_path =  dir_prefix + std::string("conv2d_5_w.bin"); 
-void* conv2d_5_w =  readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3); 
-std::string conv2d_5_b_path =  dir_prefix + std::string("conv2d_5_b.bin"); 
-void* conv2d_5_b =  readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_6_w_path =  dir_prefix + std::string("conv2d_6_w.bin"); 
-void* conv2d_6_w =  readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_6_b_path =  dir_prefix + std::string("conv2d_6_b.bin"); 
-void* conv2d_6_b =  readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_7_w_path =  dir_prefix + std::string("conv2d_7_w.bin"); 
-void* conv2d_7_w =  readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3); 
-std::string conv2d_7_b_path =  dir_prefix + std::string("conv2d_7_b.bin"); 
-void* conv2d_7_b =  readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1); 
-std::string conv2d_8_w_path =  dir_prefix + std::string("conv2d_8_w.bin"); 
-void* conv2d_8_w =  readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3); 
-std::string conv2d_8_b_path =  dir_prefix + std::string("conv2d_8_b.bin"); 
-void* conv2d_8_b =  readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_9_w_path =  dir_prefix + std::string("conv2d_9_w.bin"); 
-void* conv2d_9_w =  readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_9_b_path =  dir_prefix + std::string("conv2d_9_b.bin"); 
-void* conv2d_9_b =  readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_10_w_path =  dir_prefix + std::string("conv2d_10_w.bin"); 
-void* conv2d_10_w =  readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_10_b_path =  dir_prefix + std::string("conv2d_10_b.bin"); 
-void* conv2d_10_b =  readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_11_w_path =  dir_prefix + std::string("conv2d_11_w.bin"); 
-void* conv2d_11_w =  readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_11_b_path =  dir_prefix + std::string("conv2d_11_b.bin"); 
-void* conv2d_11_b =  readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_12_w_path =  dir_prefix + std::string("conv2d_12_w.bin"); 
-void* conv2d_12_w =  readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_12_b_path =  dir_prefix + std::string("conv2d_12_b.bin"); 
-void* conv2d_12_b =  readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1); 
-std::string conv2d_13_w_path =  dir_prefix + std::string("conv2d_13_w.bin"); 
-void* conv2d_13_w =  readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3); 
-std::string conv2d_13_b_path =  dir_prefix + std::string("conv2d_13_b.bin"); 
-void* conv2d_13_b =  readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_1_w_path =  dir_prefix + std::string("dense_1_w.bin"); 
-void* dense_1_w =  readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512); 
-std::string dense_1_b_path =  dir_prefix + std::string("dense_1_b.bin"); 
-void* dense_1_b =  readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1); 
-std::string dense_2_w_path =  dir_prefix + std::string("dense_2_w.bin"); 
-void* dense_2_w =  readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,10); 
-std::string dense_2_b_path =  dir_prefix + std::string("dense_2_b.bin"); 
-void* dense_2_b =  readTrainedWeights(dense_2_b_path.c_str(), 0,1,10,1,1); 
-
-
-void* var_0 = tensorConvolution(input, conv2d_1_w, 1, 1, 1, 1, 1, 0); 
-void* var_1 = tensorAdd(var_0, conv2d_1_b); 
-void* var_2 = tensorRelu(var_1); 
-void* var_4 = tensorConvolution(var_2, conv2d_2_w, 1, 1, 1, 1, 1, 0); 
-void* var_5 = tensorAdd(var_4, conv2d_2_b); 
-void* var_6 = tensorRelu(var_5); 
-void* var_7 = tensorPooling(var_6,0,2,2,0,0,2,2); 
-void* var_8 = tensorConvolution(var_7, conv2d_3_w, 1, 1, 1, 1, 1, 0); 
-void* var_9 = tensorAdd(var_8, conv2d_3_b); 
-void* var_10 = tensorRelu(var_9); 
-void* var_12 = tensorConvolution(var_10, conv2d_4_w, 1, 1, 1, 1, 1, 0); 
-void* var_13 = tensorAdd(var_12, conv2d_4_b); 
-void* var_14 = tensorRelu(var_13); 
-void* var_15 = tensorPooling(var_14,0,2,2,0,0,2,2); 
-void* var_16 = tensorConvolution(var_15, conv2d_5_w, 1, 1, 1, 1, 1, 0); 
-void* var_17 = tensorAdd(var_16, conv2d_5_b); 
-void* var_18 = tensorRelu(var_17); 
-void* var_20 = tensorConvolution(var_18, conv2d_6_w, 1, 1, 1, 1, 1, 0); 
-void* var_21 = tensorAdd(var_20, conv2d_6_b); 
-void* var_22 = tensorRelu(var_21); 
-void* var_24 = tensorConvolution(var_22, conv2d_7_w, 1, 1, 1, 1, 1, 0); 
-void* var_25 = tensorAdd(var_24, conv2d_7_b); 
-void* var_26 = tensorRelu(var_25); 
-void* var_27 = tensorPooling(var_26,0,2,2,0,0,2,2); 
-void* var_28 = tensorConvolution(var_27, conv2d_8_w, 1, 1, 1, 1, 1, 0); 
-void* var_29 = tensorAdd(var_28, conv2d_8_b); 
-void* var_30 = tensorRelu(var_29); 
-void* var_32 = tensorConvolution(var_30, conv2d_9_w, 1, 1, 1, 1, 1, 0); 
-void* var_33 = tensorAdd(var_32, conv2d_9_b); 
-void* var_34 = tensorRelu(var_33); 
-void* var_36 = tensorConvolution(var_34, conv2d_10_w, 1, 1, 1, 1, 1, 0); 
-void* var_37 = tensorAdd(var_36, conv2d_10_b); 
-void* var_38 = tensorRelu(var_37); 
-void* var_39 = tensorPooling(var_38,0,2,2,0,0,2,2); 
-void* var_40 = tensorConvolution(var_39, conv2d_11_w, 1, 1, 1, 1, 1, 0); 
-void* var_41 = tensorAdd(var_40, conv2d_11_b); 
-void* var_42 = tensorRelu(var_41); 
-void* var_44 = tensorConvolution(var_42, conv2d_12_w, 1, 1, 1, 1, 1, 0); 
-void* var_45 = tensorAdd(var_44, conv2d_12_b); 
-void* var_46 = tensorRelu(var_45); 
-void* var_48 = tensorConvolution(var_46, conv2d_13_w, 1, 1, 1, 1, 1, 0); 
-void* var_49 = tensorAdd(var_48, conv2d_13_b); 
-void* var_50 = tensorRelu(var_49); 
-void* var_51 = tensorPooling(var_50,0,2,2,0,0,2,2); 
-void* var_54 = tensorGemmGPU(var_51, dense_1_w); 
-void* var_55 = tensorAdd(var_54, dense_1_b); 
-void* var_56 = tensorRelu(var_55); 
-void* var_58 = tensorGemmGPU(var_56, dense_2_w); 
-void* var_59 = tensorAdd(var_58, dense_2_b); 
-void* var_60 = tensorSoftmax(var_59); 
-
-computeAccuracy2(labels,10000,var_60); 
-
-llvm_hpvm_cleanupTensorRt(); 
-
-return 0; 
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/.gitignore
deleted file mode 100644
index 9eb809777b0bcfdb2a7d91f9e671282ca03610a7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/.gitignore
+++ /dev/null
@@ -1,51 +0,0 @@
-*.py[cod]
-
-# C extensions
-*.so
-
-# Packages
-*.egg
-*.egg-info
-dist
-build
-eggs
-parts
-bin
-var
-sdist
-develop-eggs
-.installed.cfg
-lib
-lib64
-
-# Installer logs
-pip-log.txt
-
-# Unit test / coverage reports
-.coverage
-.tox
-nosetests.xml
-
-# Translations
-*.mo
-
-# Mr Developer
-.mr.developer.cfg
-.project
-.pydevproject
-
-#vim
-*.swp
-
-#virtualenv
-venv
-.ropeproject
-opentuner.log
-.*.swo
-opentuner.db
-.idea
-
-# SMB ROM (for SMB demo)
-smb.nes
-
-MANIFEST
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/AUTHORS.txt b/llvm/projects/hpvm-tensor-rt/opentuner/AUTHORS.txt
deleted file mode 100644
index 620e549e236ad322446694d11dc68a2f39a3ee31..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/AUTHORS.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Jason Ansel
-Sam Fingeret
-Shoaib Kamil
-Deepak Narayanan
-Jonathan Ragan-Kelley
-Kalyan Veeramachaneni
-Kevin Wu
-Minshu Zhan
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/CHANGES.txt b/llvm/projects/hpvm-tensor-rt/opentuner/CHANGES.txt
deleted file mode 100644
index a0af44222226f64cceb85bb633072a25abb40777..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/CHANGES.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-v0.5.0, 2015-02-10 -- Refactoring and bugfixes.
-v0.4.0, 2014-10-26 -- Add api and bugfixes.
-v0.3.0, 2014-08-11 -- Initial release.
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/LICENSE.txt b/llvm/projects/hpvm-tensor-rt/opentuner/LICENSE.txt
deleted file mode 100644
index 2b602e192b4a2302cae3288e3bd34746ff8475df..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/LICENSE.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Jason Ansel
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/MANIFEST.in b/llvm/projects/hpvm-tensor-rt/opentuner/MANIFEST.in
deleted file mode 100644
index 376b77ae8f44a06787d5910191c713c986791a72..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/MANIFEST.in
+++ /dev/null
@@ -1 +0,0 @@
-include *.txt *.md
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/README.md b/llvm/projects/hpvm-tensor-rt/opentuner/README.md
deleted file mode 100644
index 729f35553a0fe22177a38f0545d03a4497bef03c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/README.md
+++ /dev/null
@@ -1,116 +0,0 @@
-OpenTuner
-=========
-
-Program autotuning has been demonstrated in many domains to achieve better
-or more portable performance.  However, autotuners themselves are often not
-very portable between projects because using a domain informed search space
-representation is critical to achieving good results and because no single
-search technique performs best for all problems.
-
-OpenTuner is a new framework for building domain-specific multi-objective
-program autotuners. OpenTuner supports fully customizable configuration
-representations, an extensible technique representation to allow for
-domain-specific techniques, and an easy to use interface for communicating
-with the tuned program. A key capability inside OpenTuner is the simultaneous
-use of ensembles of disparate search techniques; techniques that perform well
-receive larger testing budgets, while techniques that perform poorly are
-disabled.
-
-System dependencies
--------------------
-
-A list of system dependencies can be found in [debian-packages-deps][]
-which are primarily python 2.6+ (not 3.x) and sqlite3 (or your
-[supported][sqlalchemy-dialects] database backend of choice).
-
-On Ubuntu/Debian these can be installed with:
-
-    sudo apt-get install `cat debian-packages-deps | tr '\n' ' '`
-
-[debian-packages-deps]: https://raw.github.com/jansel/opentuner/master/debian-packages-deps
-[sqlalchemy-dialects]: http://docs.sqlalchemy.org/en/rel_0_8/dialects/index.html
-
-
-Installation
--------------------
-OpenTuner (and dependencies) can be installed with
-
-    sudo pip install opentuner
-
-or
-
-    pip install --user opentuner
-
-This will not install any of the example programs.
-
-
-Development installation
--------------------
-For development (running OpenTuner out of a git checkout), a list of python
-dependencies can be found in [requirements.txt][]; these can be installed
-system-wide with `pip`:
-
-    sudo apt-get install python-pip
-    sudo pip install -r requirements.txt
-
-Or you can use virtualenv to create an isolated python environment by running:
-
-    python ./venv-bootstrap.py
-
-which will create a ./venv/bin/python (./venv/Scripts/python.exe on Windows)
-with all the required packages installed.
-
-[requirements.txt]: https://raw.github.com/jansel/opentuner/master/requirements.txt
-
-
-Checking Installation
----------------------
-
-To quickly check that the installation succeeded, run an example program
-such as:
-
-    ./examples/rosenbrock/rosenbrock.py
-
-
-Tutorials
----------
-
-- [Optimizing Block Matrix Multiplication][gettingstarted]
-- [Creating OpenTuner Techniques][technique-tutorial].
-
-[gettingstarted]: http://opentuner.org/tutorial/gettingstarted/
-[technique-tutorial]:  http://opentuner.org/tutorial/techniques/
-
-
-Papers
----------
-
-- [OpenTuner: An Extensible Framework for Program Autotuning][paper1]. <br>
-  Jason Ansel, Shoaib Kamil, Kalyan Veeramachaneni, Jonathan Ragan-Kelley,
-  Jeffrey Bosboom, Una-May O'Reilly, Saman Amarasinghe. <br>
-  International Conference on Parallel Architectures and Compilation
-  Techniques. <br>
-  Edmonton, Canada. August, 2014. [Slides][slides1]. [Bibtex][bibtex1].
-
-[paper1]: http://groups.csail.mit.edu/commit/papers/2014/ansel-pact14-opentuner.pdf
-[bibtex1]: http://groups.csail.mit.edu/commit/bibtex.cgi?key=ansel:pact:2014
-[slides1]: http://groups.csail.mit.edu/commit/papers/2014/ansel-pact14-opentuner-slides.pdf
-
-
-Contributing Code
------------------
-
-The preferred way to contribute code to OpenTuner is to fork the project
-on github and [submit a pull request][pull-req].
-
-[pull-req]: https://www.openshift.com/wiki/github-workflow-for-submitting-pull-requests
-
-
-Support
--------
-OpenTuner is supported in part by the United States Department of Energy
-[X-Stack][xstack] program as part of [D-TEC][dtec].
-
-[xstack]: http://science.energy.gov/ascr/research/computer-science/ascr-x-stack-portfolio/
-[dtec]: http://www.dtec-xstack.org/
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner.py
deleted file mode 100644
index 5977fe7ee5b4780139d2c5a865c8231361cf0f2c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python
-#
-
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
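-
-# NOTE (assumed format): each line of accuracy_summary holds whitespace-
-# separated fields, with an op's compute cost at index opt_confs_index (9).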
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy
-  
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
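-
-# The flags file written above holds one knob value per line, e.g.
-# (hypothetical) "3\n0\n7\n" for three tuning flags.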
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      ))  # default is needed; optimizations don't work without it (tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-    """
-    Run a given configuration, then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print run_cmd
-    run_result_call_program = self.call_program(run_cmd)
-    #print run_result_call_program
-
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    Result = opentuner.resultsdb.models.Result()
-    Result.time = total_comps
-    Result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-    
-    return Result
-         
-
-  def save_final_config(self, configuration):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each tuning flag can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir  
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)  # args were already parsed above
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner_piped.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner_piped.py
deleted file mode 100644
index 6d46c5762ead377292337c47d045ee5e58322954..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/accuracy_tuner_piped.py
+++ /dev/null
@@ -1,269 +0,0 @@
-#!/usr/bin/env python
-#
-# Accuracy tuner (piped variant): drives a persistent DNN binary through a
-# named pipe instead of relaunching it for every configuration
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence import dump_high_confidence_files
-from select_top_results import select_top_results
-from time import sleep
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-orig_result_dir = ""
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    # fall back to a sentinel accuracy when the file cannot be parsed
-    return 20
-    
-  print accuracy
-  return accuracy
-
-
-
-def kill(proc_pid):
-  process = psutil.Process(proc_pid)
-  for proc in process.children(recursive=True):
-    proc.kill()
-  process.kill()
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    FNULL = open(os.devnull, 'wb')
-    self.start_process = subprocess.Popen([binary_name, "opentuner_run"])  # pass stdout=FNULL to silence the child
-
-    try:
-      os.mkfifo("/tmp/myfifo")
-    except OSError, e:
-      print("FIFO exists")
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      ))  # default is needed; optimizations don't work without it (tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-
-    """
-    Run a given configuration, then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print run_cmd
-    #run_result_call_program = self.call_program(run_cmd)      
-  
-    # Using Named Pipes to signal execution to the DNN outer thread
-    fifo = open("/tmp/myfifo", "w")
-    fifo.write("start_run")
-    fifo.close()
-
-    print "Waiting for process to signal back - when done processing one run"
-
-    fifo2 = open("/tmp/myfifo", "r")
-    fifo2.read()
-    fifo2.close()
-
-    print "Process Signalled back"
-
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    Result = opentuner.resultsdb.models.Result()
-    Result.time = total_comps
-    Result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-        
-    print "done with one run"
-    
-    return Result
-
-
-  def save_final_config(self, configuration):
-
-    print "Dumping High Confidence results"
-    sleep(5)
-    
-    # Only dumping files with 95% confidence
-    dump_high_confidence_files(binary_name, orig_result_dir, accuracy_threshold, 95)
-    select_top_results(orig_result_dir + "/high_confidence")
-
-    
-    #self.start_process.kill()
-    kill(self.start_process.pid)
-    
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-
-    return
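-    # NOTE: everything below this return is intentionally unreachable; kept for reference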
-
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each tuning flag can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)  # args were already parsed above
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/adddeps.py
deleted file mode 100644
index 61fd4757d6a6045346e5cdcd3dfbfcdc00e236fa..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/adddeps.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner.py
deleted file mode 100644
index b8145e179893bc0db2631cf1f7ee0f11bcc9be0e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner.py
+++ /dev/null
@@ -1,318 +0,0 @@
-#!/usr/bin/env python
-#
-# Algorithmic Approximation Tuning
-# Purpose: Tunes for Perforation, Sampling, Numerical Precision (FP16)
-
-
-import adddeps  
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_promise_confidence_files3
-from measure_confidence2 import getConfidence, getMinAccuracy
-from select_top_results import select_top_results
-from time import sleep
-from pareto_curve import findParetoConfigs
-
-
-
-
-class TunerData:
-  def __init__(self):
-    self.binary_path = ""
-    self.output_dir = ""
-    self.num_layers = 0
-    self.knobs_list = []
-    self.knobs_speedup = {}
-    self.accuracy_threshold = 0
-    self.test_id = 0
-    self.layer_costs = []
-    self.tuning_flags = []
-    self.autotuner_runs = 0
-    
-
-
-tunerData = TunerData()
-
-
-def readCostFile(file_path):
-
-  layer_costs = []
-  f = open(file_path)
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-
-  print ("len(layer_costs) = ", layer_costs)
-  f.close()
-
-  return layer_costs
-
-  
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    # fall back to a sentinel accuracy when the file cannot be parsed
-    return 20
-    
-  print (accuracy)
-  return accuracy
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for i in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[i]
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-  
-
-def readLayerKnobs(file_path):
-
-  f = open(file_path, "r")
-  knobs_list = []
-  for x in f:
-    knobs = []
-    vals = x.split(",")
-    for val in vals:
-      knobs.append(int(val))
-      
-    knobs_list.append(knobs)
-
-  print ("knobs_list = ", knobs_list)
-  
-  return knobs_list
-
-
-
-def readKnobConfig(file_path):
-
-  knobs_speedup = {}
-  f = open(file_path, "r")
-  for x in f:
-    toks = x.split("\t")
-    ID = int(toks[0].split(",")[1])
-
-    speedup = float(toks[2])
-    knobs_speedup[ID] = speedup
-  
-  print ("knobs_speedup = ", knobs_speedup)
-  
-  return knobs_speedup
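-
-# Example knobs-config line (assumed tab-separated format):
-#   "perf,21\t<knob name>\t1.5"  ->  knob ID 21 maps to a 1.5x speedup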
-
-
-
-
-def getConfigCost(cfg):
-
-  orig_cost = 0.0
-  total_cost = 0.0
-  for it in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[it]
-    flag_value = cfg[flag]
-    op_cost = tunerData.layer_costs[it]
-    speedup = tunerData.knobs_speedup[flag_value]
-
-    total_cost += op_cost / speedup
-    orig_cost += op_cost
-
-  speedup = orig_cost / total_cost
-  
-  return total_cost, speedup
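-
-# Worked example (hypothetical numbers): with layer_costs = [100.0, 300.0]
-# and knobs_speedup = {11: 1.0, 12: 2.0}, picking knob 12 for both layers
-# gives total_cost = 100/2 + 300/2 = 200 and speedup = 400/200 = 2.0x.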
-
-
-
-def appendTopLine(f_path, accuracy, total_runs, total_comps, speedup):
-
-  f_str = open(f_path, "r").read()
-
-  f_out = open(f_path, "w+")
-
-  f_out.write("total_runs=" + str(total_runs) + "\tconfidence=100.0" + "\tavg_accuracy=" + str(accuracy) + "\tconfig_cost=" + str(total_comps) + "\tspeedup=" + str(speedup) + "\n" )
-  f_out.write(f_str)
-
-  f_out.close()
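-
-# Example of the header line prepended above (hypothetical values):
-# "total_runs=1\tconfidence=100.0\tavg_accuracy=84.2\tconfig_cost=1200.0\tspeedup=1.8"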
-      
-
-
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(tunerData.accuracy_threshold)
-    input_manager = FixedInputManager(size=tunerData.num_layers)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-
-    for i in range(tunerData.num_layers):
-      tunerData.tuning_flags.append("flag" + str(i))
-
-         
-    for ind in range(tunerData.num_layers):
-      flag = tunerData.tuning_flags[ind]
-      manipulator.add_parameter(
-          EnumParameter(flag, tunerData.knobs_list[ind]))
-
-      print ("ind = ", ind, " len = ", len(tunerData.knobs_list))
-      print (tunerData.knobs_list[ind])
-      
-    return manipulator
-
-  
-  
-  def run(self, desired_result, input, limit):
-    
-    """
-    Run  a given configuration then
-    return performance
-    """
-    global test_id
-    
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("promise_flags", cfg)
-    
-    run_cmd = tunerData.binary_path
-    print "\nbinary_path = ", run_cmd
- 
-
-    total_runs = 1 # NOTE: Single run sufficient in Algorithmic Approx Tuner
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen([run_cmd, str(total_runs)], stdout = FNULL)
-    p.wait()
-
-       
-    accuracy = getAccuracy("final_accuracy")
-    
-    # getConfigCost returns the cost associated with the selected configuration
-    total_comps, speedup = getConfigCost(cfg)
-   
-    
-    Result = opentuner.resultsdb.models.Result()
-    Result.time = total_comps
-    #Result.accuracy = accuracy
-    min_accuracy = getMinAccuracy("run_accuracies.txt")
-    print ("min_accuracy = ", min_accuracy)
-    Result.accuracy = min_accuracy
-    
-    if min_accuracy > tunerData.accuracy_threshold:
-      config_tuple = (total_comps, accuracy, cfg)
-      self.configs_list.append(config_tuple)
-      f_path = tunerData.output_dir + '/' + tunerData.binary_path + '_' + str(tunerData.test_id)
-      shutil.copy('promise_flags', f_path)
-
-      appendTopLine(f_path, accuracy, total_runs, total_comps, speedup)
-
-      f_acc = open(tunerData.output_dir + '/' + tunerData.binary_path + '_' + str(tunerData.test_id) + "_accuracy", "w")
-      f_acc.write(str(accuracy))
-      f_acc.close()
-                   
-      
-    tunerData.test_id += 1
-    
-    return Result
-
-
-  def save_final_config(self, configuration):
-
-    print "Done with Autotuning Run \n"
-    sleep(2)
-
-    print "Final configuration", configuration.data
-
-    return
-
-  
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-layers', type=int, help='num of flags to tune')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-  argparser.add_argument('--cost-file', help='per-layer cost file (op counts)')
-  argparser.add_argument('--knobs-config', help='knob settings and ID mapping')
-  argparser.add_argument('--layer-knobs', help='per-layer Knobs')
-  
-  
-  args = argparser.parse_args()
-
-  tunerData.binary_path = str(args.binary)
-  tunerData.num_layers = int(args.num_layers)
-  tunerData.accuracy_threshold = float(args.accuracy)
-
-
-  # NOTE: read the cost file (op count per layer) to better guide the autotuner
-  cost_file_path = args.cost_file
-  tunerData.layer_costs = readCostFile(cost_file_path)
-
-  
-  tunerData.knobs_list = readLayerKnobs(args.layer_knobs)
-  tunerData.knobs_speedup = readKnobConfig(args.knobs_config)
-  
-  result_dir = args.result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-        
-  tunerData.output_dir = result_dir + "/high_confidence/"
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(tunerData.output_dir):
-    print("Creating output directory = ", tunerData.output_dir)
-    os.mkdir(tunerData.output_dir)
-
-
-    
-  ClangFlagsTuner.main(args)  # args were already parsed above
-
-  
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner2.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner2.py
deleted file mode 100644
index 4ca0062f93441954d3ee0acc0eabf10352e3a76c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/algo_tuner2.py
+++ /dev/null
@@ -1,339 +0,0 @@
-#!/usr/bin/env python
-#
-# Algorithmic Approximation Tuning
-# Purpose: Tunes for Perforation, Sampling, Numerical Precision (FP16)
-
-
-import adddeps  
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_promise_confidence_files4
-from measure_confidence2 import getConfidence, getMinAccuracy
-from select_top_results import select_top_results
-from time import sleep
-from pareto_curve import findParetoConfigs
-
-
-
-
-class TunerData:
-  def __init__(self):
-    self.binary_path = ""
-    self.output_dir = ""
-    self.num_layers = 0
-    self.knobs_list = []
-    self.knobs_speedup = {}
-    self.accuracy_threshold = 0
-    self.test_id = 0
-    self.layer_costs = []
-    self.tuning_flags = []
-    self.autotuner_runs = 0
-    
-
-
-tunerData = TunerData()
-
-
-orig_result_dir = ""
-
-
-def readCostFile(file_path):
-
-  layer_costs = []
-  f = open(file_path)
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-
-  print ("len(layer_costs) = ", layer_costs)
-  f.close()
-
-  return layer_costs
-
-  
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    # fall back to a sentinel accuracy when the file cannot be parsed
-    return 20
-    
-  print (accuracy)
-  return accuracy
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for i in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[i]
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-  
-
-def readLayerKnobs(file_path):
-
-  f = open(file_path, "r")
-  knobs_list = []
-  for x in f:
-    knobs = []
-    vals = x.split(",")
-    for val in vals:
-      knobs.append(int(val))
-      
-    knobs_list.append(knobs)
-
-  print ("knobs_list = ", knobs_list)
-  
-  return knobs_list
-
-
-
-def readKnobConfig(file_path):
-
-  knobs_speedup = {}
-  f = open(file_path, "r")
-  for x in f:
-    toks = x.split("\t")
-    ID = int(toks[0].split(",")[1])
-
-    speedup = float(toks[2])
-    knobs_speedup[ID] = speedup
-  
-  print ("knobs_speedup = ", knobs_speedup)
-  
-  return knobs_speedup
-
-
-
-
-def getConfigCost(cfg):
-
-  orig_cost = 0.0
-  total_cost = 0.0
-  for it in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[it]
-    flag_value = cfg[flag]
-    op_cost = tunerData.layer_costs[it]
-    speedup = tunerData.knobs_speedup[flag_value]
-
-    total_cost += op_cost / speedup
-    orig_cost += op_cost
-
-  speedup = orig_cost / total_cost
-  
-  return total_cost, speedup
-
-
-
-def appendTopLine(f_path, accuracy, total_runs, total_comps, speedup):
-
-  f_str = open(f_path, "r").read()
-
-  f_out = open(f_path, "w+")
-
-  f_out.write("total_runs=" + str(total_runs) + "\tconfidence=100.0" + "\tavg_accuracy=" + str(accuracy) + "\tconfig_cost=" + str(total_comps) + "\tspeedup=" + str(speedup) + "\n" )
-  f_out.write(f_str)
-
-  f_out.close()
-      
-
-
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(tunerData.accuracy_threshold)
-    input_manager = FixedInputManager(size=tunerData.num_layers)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-
-    for i in range(tunerData.num_layers):
-      tunerData.tuning_flags.append("flag" + str(i))
-
-         
-    for ind in range(tunerData.num_layers):
-      flag = tunerData.tuning_flags[ind]
-      manipulator.add_parameter(
-          EnumParameter(flag, tunerData.knobs_list[ind]))
-
-      print ("ind = ", ind, " len = ", len(tunerData.knobs_list))
-      print (tunerData.knobs_list[ind])
-      
-    return manipulator
-
-  
-  
-  def run(self, desired_result, input, limit):
-    
-    """
-    Run  a given configuration then
-    return performance
-    """
-    global test_id
-    
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("promise_flags", cfg)
-    
-    run_cmd = tunerData.binary_path
-    print "\nbinary_path = ", run_cmd
-
-
-    input_size = 5000
-    offset = 5000
-
-    total_runs = 2  # NOTE: at least two runs are needed for the promise tuner
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen([run_cmd, str(total_runs), str(tunerData.accuracy_threshold), str(1), str(input_size), str(offset)], stdout=FNULL)
-    p.wait()
-
-       
-    accuracy = getAccuracy("final_accuracy")
-    
-    # getConfigCost returns the cost associated with the selected configuration
-    total_comps, speedup = getConfigCost(cfg)
-   
-    
-    Result = opentuner.resultsdb.models.Result()
-    Result.time = total_comps
-    #Result.accuracy = accuracy
-    min_accuracy = getMinAccuracy("run_accuracies.txt")
-    print ("min_accuracy = ", min_accuracy)
-    Result.accuracy = min_accuracy
-    
-    if min_accuracy > tunerData.accuracy_threshold:
-      config_tuple = (total_comps, accuracy, cfg)
-      self.configs_list.append(config_tuple)
-      f_path = tunerData.output_dir + '/' + tunerData.binary_path + '_' + str(tunerData.test_id)
-      shutil.copy('promise_flags', f_path)
-
-      appendTopLine(f_path, accuracy, total_runs, total_comps, speedup)
-
-      
-    tunerData.test_id += 1
-    
-    return Result
-
-
-  def save_final_config(self, configuration):
-
-    print "Done with Autotuning Run \n"
-    sleep(2)
-
-
-    #findParetoConfigs(orig_result_dir, layer_costs, accuracy_threshold)
-
-    input_dir = orig_result_dir + "/full_results/"
-    output_dir = orig_result_dir + "/high_confidence/"
-    
-    # Only dumping files with 95% confidence
-    dump_promise_confidence_files4(tunerData.binary_path, input_dir, output_dir, tunerData.layer_file, tunerData.num_layers, tunerData.accuracy_threshold, tunerData.layer_costs, 95, tunerData.knobs_speedup)
-
-    
-    print "Final configuration", configuration.data
-
-    return
-
-  
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-layers', type=int, help='num of flags to tune')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-
-  argparser.add_argument('--layer-file', help='layer file')
-
-  argparser.add_argument('--cost-file', help='per-layer cost file (op counts)')
-  argparser.add_argument('--knobs-config', help='knob settings and ID mapping')
-  argparser.add_argument('--layer-knobs', help='per-layer Knobs')
-  
-  
-  args = argparser.parse_args()
-
-  tunerData.binary_path = str(args.binary)
-  tunerData.num_layers = int(args.num_layers)
-  tunerData.accuracy_threshold = float(args.accuracy)
-
-  tunerData.layer_file = args.layer_file
-
-  # NOTE: read the cost file (op count per layer) to better guide the autotuner
-  cost_file_path = args.cost_file
-  tunerData.layer_costs = readCostFile(cost_file_path)
-
-  
-  tunerData.knobs_list = readLayerKnobs(args.layer_knobs)
-  tunerData.knobs_speedup = readKnobConfig(args.knobs_config)
-  
-  result_dir = args.result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-  orig_result_dir = result_dir  
-  tunerData.output_dir = result_dir + "/full_results/"
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(tunerData.output_dir):
-    print("Creating output directory = ", tunerData.output_dir)
-    os.mkdir(tunerData.output_dir)
-
-
-    
-  ClangFlagsTuner.main(args)  # args were already parsed above
-
-  
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/approxhpvm_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/approxhpvm_tuner.py
deleted file mode 100644
index 9ae2266bf481a9dd772fd139b375463b35bcd1b9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/approxhpvm_tuner.py
+++ /dev/null
@@ -1,262 +0,0 @@
-#!/usr/bin/env python
-#
-# ApproxHPVM accuracy tuner: searches approximation flags for a target
-# binary while keeping accuracy above a threshold
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_high_confidence_files
-from select_top_results import select_top_results
-from time import sleep
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-orig_result_dir = ""
-
-
-def copyTunerRuntime():
-  tensor_rt_path = os.environ.get("LLVM_SRC_ROOT", "")
-  if tensor_rt_path == "":
-    print "LLVM_SRC_ROOT NOT SET"
-    sys.exit(1)
-
-  print "tensor_rt_path = ", tensor_rt_path  
-
-  
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    # fall back to a sentinel accuracy when the file cannot be parsed
-    return 20
-    
-  print accuracy
-  return accuracy
-
-
-
-def kill(proc_pid):
-  process = psutil.Process(proc_pid)
-  for proc in process.children(recursive=True):
-    proc.kill()
-  process.kill()
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-  
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      ))  # default is needed; optimizations don't work without it (tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-
-    """
-    Run a given configuration, then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print "binary_name = ", run_cmd
-    #run_result_call_program = self.call_program(run_cmd)
-    #print "returned \n\n"
-
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen(run_cmd, stdout = FNULL)
-    p.wait()
-    
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    Result = opentuner.resultsdb.models.Result()
-    Result.time = total_comps
-    Result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-       
-    print "done with one run"
-    
-    return Result
-
-
-  def save_final_config(self, configuration):
-
-    print "Dumping High Confidence results"
-    sleep(5)
-    
-    # Only dumping files with 95% confidence
-    dump_high_confidence_files(binary_name, orig_result_dir, accuracy_threshold, 95)
-    select_top_results(orig_result_dir + "/high_confidence")
-
-    
-    # NOTE: unlike the piped tuner, this script never launches start_process,
-    # so guard the kill to avoid an AttributeError here
-    if hasattr(self, 'start_process'):
-      kill(self.start_process.pid)
-    
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-
-    return
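-    # NOTE: everything below this return is intentionally unreachable; kept for reference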
-
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each tuning flag can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)  # args were already parsed above
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/gettingstarted.md b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/gettingstarted.md
deleted file mode 100644
index 8a442c5f44d6c501f686125d4468ca642f745920..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/gettingstarted.md
+++ /dev/null
@@ -1,215 +0,0 @@
----
-layout: default
-title: OpenTuner - Using OpenTuner
-permalink: /tutorial/gettingstarted/index.html
----
-
-Tutorial: Optimizing Block Matrix Multiplication
-================================================
-
-This tutorial assumes that you have checked out a copy of opentuner. For
-guidelines on how to get opentuner set up, refer [here][setup].
-
-[setup]: http://opentuner.org/tutorial/setup/
-
-Identifying a Program to Autotune
----------------------------------
-
-In order to do autotuning, you first need something to autotune. This will
-normally be your own program that you want to make faster or better in
-some way. For this tutorial we will use a blocked version of matrix multiply
-as an example. We will use opentuner to find the optimal value of the block
-size parameter.
-
-We will autotune the sample code below (based on a modification of code
-found [here][matrix-multiply-code]), making sure to take the block size as
-a compile-time constant to the program.
-
-[matrix-multiply-code]: http://csapp.cs.cmu.edu/public/waside/waside-blocking.pdf
-
-Save the sample code below to examples/tutorials/mmm_block.cpp
-
-    #include <stdio.h>
-    #include <cstdlib>
-
-    #define N 100
-    
-    int main(int argc, const char** argv)
-    {
-    
-      int n = BLOCK_SIZE * (N/BLOCK_SIZE);
-      int a[N][N];
-      int b[N][N];
-      int c[N][N];
-      int sum=0;
-      for(int k1=0;k1<n;k1+=BLOCK_SIZE)
-      {
-          for(int j1=0;j1<n;j1+=BLOCK_SIZE)
-          {
-              for(int k1=0;k1<n;k1+=BLOCK_SIZE)
-              {
-                  for(int i=0;i<n;i++)
-                  {
-                      for(int j=j1;j<j1+BLOCK_SIZE;j++)
-                      {
-                          sum = c[i][j];
-                          for(int k=k1;k<k1+BLOCK_SIZE;k++)
-                          {
-                              sum += a[i][k] * b[k][j];
-                          }
-                          c[i][j] = sum;
-                      }
-                  }
-              }
-          }
-             }
-      return 0;
-    }
-
-Creating a New Autotuner with Opentuner
-------------------------------------
-Now we need to create a program that uses OpenTuner to optimize the program we just saved.
-
-Save the following code to examples/tutorials/mmm_tuner.py
-
-    #!/usr/bin/env python
-    #
-    # Optimize blocksize of apps/mmm_block.cpp
-    #
-    # This is an extremely simplified version meant only for tutorials
-    #
-    import adddeps  # fix sys.path
-
-    import opentuner
-    from opentuner import ConfigurationManipulator
-    from opentuner import IntegerParameter
-    from opentuner import MeasurementInterface
-    from opentuner import Result
-
-
-    class GccFlagsTuner(MeasurementInterface):
-
-      def manipulator(self):
-        """
-        Define the search space by creating a
-        ConfigurationManipulator
-        """
-        manipulator = ConfigurationManipulator()
-        manipulator.add_parameter(
-          IntegerParameter('blockSize', 1, 10))
-        return manipulator
-
-      def run(self, desired_result, input, limit):
-        """
-        Compile and run a given configuration then
-        return performance
-        """
-        cfg = desired_result.configuration.data
-
-        gcc_cmd = 'g++ mmm_block.cpp '
-        gcc_cmd += '-DBLOCK_SIZE=' + str(cfg['blockSize'])
-        gcc_cmd += ' -o ./tmp.bin'
-
-        compile_result = self.call_program(gcc_cmd)
-        assert compile_result['returncode'] == 0
-
-        run_cmd = './tmp.bin'
-
-        run_result = self.call_program(run_cmd)
-        assert run_result['returncode'] == 0
-
-        return Result(time=run_result['time'])
-
-      def save_final_config(self, configuration):
-        """called at the end of tuning"""
-        print "Optimal block size written to mmm_final_config.json:", configuration.data
-        self.manipulator().save_to_file(configuration.data,
-                                        'mmm_final_config.json')
-
-
-    if __name__ == '__main__':
-      argparser = opentuner.default_argparser()
-      GccFlagsTuner.main(argparser.parse_args())
-
-
-This file consists of several components, each of which will be discussed in further detail below.
-
-Tuning Programs have a general structure as follows:
-
-    from opentuner import MeasurementInterface
-    from opentuner import Result
-
-Next, define the class GccFlagsTuner, a MeasurementInterface subclass that tunes the specified parameters using OpenTuner:
-
-    class GccFlagsTuner(MeasurementInterface):
-
-The manipulator method defines the search space by specifying the parameters to be tuned by this instance of GccFlagsTuner:
-
-    def manipulator(self):
-      """
-      Define the search space by creating a
-      ConfigurationManipulator
-      """
-      manipulator = ConfigurationManipulator()
-      manipulator.add_parameter(
-        IntegerParameter('BLOCK_SIZE', 1, 10))
-      return manipulator
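-
-OpenTuner ships several parameter types besides IntegerParameter, and a single manipulator can combine them. As a hypothetical sketch (the optimization level is not part of this tutorial's search space), the same search could also pick a g++ -O level:
-
-    from opentuner import ConfigurationManipulator
-    from opentuner import EnumParameter
-    from opentuner import IntegerParameter
-
-    def build_manipulator():
-      # BLOCK_SIZE is searched over 1..10 as before; OPT_LEVEL is a
-      # hypothetical knob drawn from an explicit list of choices
-      manipulator = ConfigurationManipulator()
-      manipulator.add_parameter(IntegerParameter('BLOCK_SIZE', 1, 10))
-      manipulator.add_parameter(EnumParameter('OPT_LEVEL', ['-O1', '-O2', '-O3']))
-      return manipulator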
-
-The run method compiles and runs the target program under a given configuration and returns its measured performance. In this example, the BLOCK_SIZE parameter is passed as a compile-time constant that takes on a value from the specified range on each run. OpenTuner also supports other ways of feeding parameters to the program, which may be preferable in other use cases; one such alternative is sketched after the code block below.
-
-    def run(self, desired_result, input, limit):
-      """
-      Compile and run a given configuration then
-      return performance
-      """
-      cfg = desired_result.configuration.data
-
-      gcc_cmd = 'g++ mmm_block.cpp '
-      gcc_cmd += '-DBLOCK_SIZE=' + str(cfg['BLOCK_SIZE'])
-      gcc_cmd += ' -o ./tmp.bin'
-
-      compile_result = self.call_program(gcc_cmd)
-      assert compile_result['returncode'] == 0
-
-      run_cmd = './tmp.bin'
-
-      run_result = self.call_program(run_cmd)
-      assert run_result['returncode'] == 0
-
-      return Result(time=run_result['time'])
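-
-As one such alternative, here is a minimal hypothetical sketch of a run method that feeds the block size to the program at runtime rather than recompiling for every configuration; it assumes mmm_block.cpp were modified to read its block size from argv[1], which the tutorial version does not do:
-
-    def run(self, desired_result, input, limit):
-      cfg = desired_result.configuration.data
-      # Reuse a single pre-built binary; only the runtime argument changes
-      run_result = self.call_program('./tmp.bin %d' % cfg['BLOCK_SIZE'])
-      assert run_result['returncode'] == 0
-      return Result(time=run_result['time'])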
-
-We can display the result of running OpenTuner (the optimal block size for our multiplication problem) by defining a save_final_config() method in our class. This saves a dictionary of the best BLOCK_SIZE value found to the file mmm_final_config.json:
-
-    def save_final_config(self, configuration):
-      """called at the end of tuning"""
-      print "Optimal block size written to mmm_final_config.json:", configuration.data
-      self.manipulator().save_to_file(configuration.data,
-                                      'mmm_final_config.json')
-
-    if __name__ == '__main__':
-      argparser = opentuner.default_argparser()
-      GccFlagsTuner.main(argparser.parse_args())
-
-Generating and Viewing Results
-------------------------------
-
-Run the following command to autotune our program (the --no-dups flag hides warnings about duplicate results, and the --stop-after parameter caps the tuning run at a maximum of 30 seconds):
-
-    python mmm_tuner.py --no-dups --stop-after=30
-
-The results of each run configuration are displayed as follows (output lines are truncated here for readability):
-
-    [    10s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    19s]    INFO opentuner.search.metatechniques: AUCBanditMetaTechniqueA: [('DifferentialEvolutionAlt', 477), ('UniformGreedyMutation', 18), ('NormalGreedyMutation', 5), ('RandomNelderMead', 1)]
-    [    20s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    30s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    30s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    Optimal block size written to mmm_final_config.json: {'BLOCK_SIZE': 4}
-
-
-Look up the optimal BLOCK_SIZE value by inspecting the generated file:
-
-    mmm_final_config.json
-
-In this example, the output file content was as follows:
-
-    {'BLOCK_SIZE': 4}
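-
-Note that the dump above is Python repr syntax rather than strict JSON, so a minimal sketch for reading the tuned value back (assuming the file content shown above) is:
-
-    import ast
-
-    with open('mmm_final_config.json') as f:
-      best = ast.literal_eval(f.read())
-    print best['BLOCK_SIZE']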
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence.py
deleted file mode 100644
index dd7a050ac8428f99872abd25d1aa2f3d794f7e2b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence.py
+++ /dev/null
@@ -1,290 +0,0 @@
-
-import argparse
-import os
-import sys
-from time import sleep
-
-
-def getAccuracy(file_name):
-
-  if not os.path.exists(file_name):
-    print("final_accuracy file not found ")
-    sys.exit(0)
-    
-  file = open(file_name, "r")
-  acc_str = file.read()
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy  
-
-
-total_runs = 40
-skip_lines = 0
-
-
-def test_func():
-  print "test_func"
-  sys.exit(0)
-
-
-def do_multiple_runs(binary_name, accuracy_threshold, confidence_threshold):
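-  # Each iteration signals the already-running DNN process over the
-  # /tmp/myfifo named pipe, blocks until it signals completion, then
-  # reads the accuracy it wrote to the "final_accuracy" file.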
-
-  #total_runs = 100.0
-  successful_runs = 0.0
-  total_acc = 0
-
-  for i in range(int(total_runs)):
-
-    fifo = open("/tmp/myfifo", "w")
-    fifo.write("start_run")
-    fifo.close()
-
-    print "Waiting for process to signal back - when done processing one run"
-
-    fifo2 = open("/tmp/myfifo", "r")
-    fifo2.read()
-    fifo2.close()
-
-    print "Process Signalled back"
-
-    accuracy = getAccuracy("final_accuracy")
-    total_acc += accuracy
-
-    if accuracy > accuracy_threshold:
-      successful_runs += 1
-
-  confidence = (successful_runs / (total_runs*1.0) ) * 100.0    
-  print("confidence = ", confidence)    
-  avg_acc = total_acc / total_runs
-  print("average accuracy = ", avg_acc)
-
-  return confidence, avg_acc
-  
-
-def compute_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("opentuner_flags", "w+")
-
-    index = 0
-    results_str = ""
-    for x in f:
-      if index >= skip_lines:
-        error_knob = int(float(x.split()[1]))
-        print error_knob
-        tuner_file.write(str(error_knob) + "\n")
-
-      results_str += x
-      index += 1
-      
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-
-    if run_confidence > 90:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(results_str)
-      f2.close()
-
-    conf_result = (run_confidence, avg_accuracy, file_name)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-
-def compute_promise_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("opentuner_flags", "w+")
-
-    config_str = f.read()
-    tuner_file.write(config_str)  
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-
-    if run_confidence > 90:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(config_str)
-      f2.close()
-
-    flags_str = config_str.replace('\n', ',')
-    conf_result = (run_confidence, avg_accuracy, file_name, flags_str)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-
-def dump_high_confidence_files(binary, result_dir, accuracy, confidence):
-
-  #result_dir = args.result_dir
-  output_dir = result_dir + "/high_confidence"
-  result_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)
-
-    
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  print confidence_list
-
-  # descending sort on confidence
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  print  "Dumped Confidence Summary"
-  
-
-def processLayerFile(layer_file_path):
-
-  layer_sizes = []
-  layer_file = open(layer_file_path, "r")
-
-  for layer_size in layer_file:
-    try:
-      size = int(layer_size)
-      layer_sizes.append(size)
-    except:
-      return layer_sizes
-
-  return layer_sizes
-
-
-
-def getLayerConfigStr(config_str, layer_sizes, num_flags):
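-  # Expand a comma-separated per-layer knob string into the form the
-  # runtime expects: knobs 8 and 9 (FP32/FP16) are replicated once per
-  # tensor op in the layer, while other knobs are emitted once per layer.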
-
-  new_config_str = ""
-  config_vals = config_str.split(',')
-  it_count = 0
-  for val in config_vals:
-    if val == "":
-      continue
-    
-    config_val = int(val)
-    # For FP32 and FP16 values, each tensor op needs to be annotated
-    if config_val == 8 or config_val == 9:
-      for i in range(layer_sizes[it_count] - 1):
-        new_config_str += val + " "
-      new_config_str += val
-      if it_count < num_flags - 1:
-        new_config_str += ","
-    else:
-      new_config_str += val
-      if it_count < num_flags - 1:
-        new_config_str += ","
-
-    it_count += 1  
-
-  return new_config_str
-
-
-def dump_promise_confidence_files(binary, result_dir, layer_file_path, num_flags, accuracy, confidence):
-
-  #result_dir = args.result_dir
-  output_dir = result_dir + "/high_confidence"
-  input_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)
-    
-
-  layer_sizes = processLayerFile(layer_file_path)
-  print layer_sizes
-  sleep(3)
-    
-  confidence_list = compute_promise_confidence(binary, accuracy, confidence, input_dir, output_dir)
-  print confidence_list
-
-  # Ascending sort on accuracy
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[1])
-   
-  promise_file = open(output_dir + "/promise_confs.txt", "w+")
-  confidence_file = open(output_dir + "/confidence_summary.txt", "w+")
-
-  max_configs = 50
-  it_count = 0
-  for x in sorted_list:
-    if x[1] > accuracy and x[0] > confidence:
-      config_str = getLayerConfigStr(x[3], layer_sizes, num_flags)
-      promise_file.write(config_str + "\n")
-      it_count += 1
-      if it_count > max_configs:
-        break
-       
-    confidence_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[3]) + "\n")    
-    
-  promise_file.close()
-  confidence_file.close()
-  
-  print  "Dumped Confidence Summary"
-
-  
-
-
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-  argparser.add_argument('--output-dir', help='Directory for storing output directory')
-  argparser.add_argument('--binary', help='Binary name to run')
-  argparser.add_argument('--accuracy', type=float,  help='Accuracy constraint')
-  argparser.add_argument('--confidence', type=float, help='Confidence threshold')
-  
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-  output_dir = args.output_dir
-  binary = args.binary
-  accuracy = args.accuracy
-  confidence = args.confidence
-
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  #print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence2.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence2.py
deleted file mode 100644
index f5998ff3c871fe2db625873dc75fcf8fe4452838..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/measure_confidence2.py
+++ /dev/null
@@ -1,664 +0,0 @@
-
-import argparse
-import os
-import sys
-import subprocess
-from time import sleep
-
-
-def getAccuracy(file_name):
-
-  if not os.path.exists(file_name):
-    print("final_accuracy file not found ")
-    sys.exit(0)
-    
-  file = open(file_name, "r")
-  acc_str = file.read()
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy  
-
-
-total_runs = 40.0
-fails_allowed = 3
-skip_lines = 0
-
-
-def test_func():
-  print "test_func"
-  sys.exit(0)
-
-  
-
-def do_multiple_runs(binary_name, accuracy_threshold, confidence_threshold):
-
-  successful_runs = 0.0
-  unsuccessful_runs = 0.0
-  total_acc = 0
-
-  for i in range(int(total_runs)):
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen(binary_name, stdout = FNULL)
-    p.wait()
-    
-    accuracy = getAccuracy("final_accuracy")
-    total_acc += accuracy
-
-    if accuracy > accuracy_threshold:
-      successful_runs += 1
-    else:
-      unsuccessful_runs += 1
-
-    if unsuccessful_runs > 6:
-      break
-      
-
-  confidence = (successful_runs / total_runs) * 100.0    
-  print("confidence = ", confidence)    
-  avg_acc = total_acc / total_runs
-  print("average accuracy = ", avg_acc)
-
-  return confidence, avg_acc
-
-
-
-def getConfidence(accuracy_outfile, acc_threshold):
-
-  f = open(accuracy_outfile, "r")
-
-  total_acc = 0.0
-  failed = 0
-  it = 0
-  
-  for x in f:
-    acc = float(x.strip())
-    if acc < acc_threshold:
-      failed += 1
-
-    total_acc += acc     
-    it += 1
-
-  conf = (it * 1.0 - failed) / it * 100
-  avg_acc = total_acc / it
-  
-  return conf, avg_acc
-
-
-
-def getMinAccuracy(accuracy_outfile):
-
-  f = open(accuracy_outfile, "r")
-
-  acc_list = []
-  for x in f:
-    acc = float(x.strip())
-    acc_list.append(acc)
-
-  return min(acc_list)
-
-  
-# NOTE: invokes the binary with the number of runs
-def do_multiple_runs2(binary_name, accuracy_threshold, confidence_threshold):
-
-  successful_runs = 0.0
-  unsuccessful_runs = 0.0
-  total_acc = 0
-
-  FNULL = open(os.devnull, 'wb')
-  p = subprocess.Popen([binary_name, str(int(total_runs)), str(accuracy_threshold), str(fails_allowed)], stdout = FNULL)
-  p.wait()
-
-  confidence, avg_acc = getConfidence("run_accuracies.txt", accuracy_threshold) 
-
-  print("confidence = ", confidence)    
-  print("average accuracy = ", avg_acc)
-
-  return confidence, avg_acc
-  
-
-
-
-
-def compute_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("opentuner_flags", "w+")
-
-    index = 0
-    results_str = ""
-    for x in f:
-      if index >= skip_lines:
-        error_knob = int(float(x.split()[1]))
-        print error_knob
-        tuner_file.write(str(error_knob) + "\n")
-
-      results_str += x
-      index += 1
-      
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs2(binary_name, accuracy, confidence)
-
-    if run_confidence >= 95:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(results_str)
-      f2.close()
-
-    conf_result = (run_confidence, avg_accuracy, file_name)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-def dump_high_confidence_files(binary, result_dir, accuracy, confidence):
-
-  #result_dir = args.result_dir
-  output_dir = result_dir + "/high_confidence"
-  result_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)
-    
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  print  "Dumped Confidence Summary"
-
-
-  
-
-def processLayerFile(layer_file_path):
-
-  layer_sizes = []
-  layer_file = open(layer_file_path, "r")
-
-  for layer_desc in layer_file:
-    try:
-      toks = layer_desc.split(",")
-      if len(toks) < 2: # Not layer size description
-        continue
-      
-      size = int(toks[1])
-      if "NML" in layer_desc:
-        size = -1
-      layer_sizes.append(size)
-    except:
-      return layer_sizes
-
-  return layer_sizes
-
-
-
-def getLayerConfigStr(config_str, layer_sizes, num_flags):
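-  # Variant of measure_confidence.getLayerConfigStr: layer_sizes entries of
-  # -1 (lines tagged "NML" in the layer file) are pinned to knob 8, and
-  # knobs 8/9 are replicated once per tensor op in the layer.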
-
-  new_config_str = ""
-  config_vals = config_str.split(',')
-  it_count = 0
-  layer_count = 0
-  
-  #for layer_size in  val in config_vals:
-  for layer_depth_size in layer_sizes:
-
-    if layer_depth_size == -1:
-      new_config_str += "8"
-      layer_count += 1
-      if layer_count < len(layer_sizes):
-        new_config_str += ","
-      continue
-    
-    val = config_vals[it_count]      
-    if val == "":
-      continue
-    
-    config_val = int(val)
-    # For FP32 and FP16 values, each tensor op needs to be annotated
-    if config_val == 8 or config_val == 9:
-      for i in range(layer_depth_size - 1):
-        new_config_str += val + " "
-      new_config_str += val
-      if layer_count < len(layer_sizes) - 1:
-        new_config_str += ","
-    else:
-      new_config_str += val
-      if layer_count < len(layer_sizes) - 1:
-        new_config_str += ","
-
-    it_count += 1
-    layer_count += 1
-    
-
-  return new_config_str
-
-
-
-def compute_promise_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("promise_flags", "w+")
-
-    config_str = f.read()
-    tuner_file.write(config_str)  
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-
-    if run_confidence >= 95:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(config_str)
-      f2.close()
-
-    flags_str = config_str.replace('\n', ',')
-    conf_result = (run_confidence, avg_accuracy, file_name, flags_str)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-def getConfigCost(layer_costs, config_str):
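-  # Estimate a configuration's cost from per-layer op counts using
-  # hard-coded per-knob speedup factors: knob 11 runs at full cost,
-  # 10 at 1.3x, 8/9 at 1.6x, and knobs below 8 at 5x (knob 7) up to
-  # 11x (knob 1), via divisor = 5 + (7 - knob).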
-
-  tuning_flags = config_str.split("\n")
-  
-  it = 0
-  orig_cost = 0.0
-  total_cost = 0.0
-  for flag in tuning_flags:
-
-    flag_value = -1
-    try:
-      flag_value = int(flag)
-    except:
-      continue
-
-    orig_cost += layer_costs[it]
-
-    #print ("orig_cost = ", orig_cost, " flag_value = ", flag_value) 
-    
-    if flag_value == 11:
-      total_cost += layer_costs[it]
-    elif flag_value == 10:
-      total_cost += (layer_costs[it] / 1.3)
-    elif flag_value == 8 or flag_value == 9:
-      total_cost += (layer_costs[it] / 1.6)
-    elif flag_value < 8:
-      divisor = 5 + (7 - flag_value)
-      total_cost += (layer_costs[it] / divisor)
- 
-    it += 1
-
-  speedup = (orig_cost * 1.0) / (total_cost * 1.0)
-  
-  return total_cost, speedup 
-
-
-
-
-
-def getConfigCost2(layer_costs, knobs_speedup, config_flags):
-
-  orig_cost = 0.0
-  total_cost = 0.0
-  for it in range(len(config_flags)):
-    flag_value = config_flags[it]
-    op_cost = layer_costs[it]
-    speedup = knobs_speedup[flag_value]
-
-    total_cost += (op_cost * 1.0 / speedup * 1.0)
-    orig_cost += op_cost
-
-  speedup = (orig_cost * 1.0) / (total_cost * 1.0)
-  
-  return total_cost, speedup
-
-
-
-
-
-def compute_promise_confidence2(binary_name, accuracy, confidence, layer_costs,
-                                result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("promise_flags", "w+")
-
-    config_str = f.read()
-    tuner_file.write(config_str)  
-    tuner_file.close()
-    
-    #run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-    run_confidence, avg_accuracy = do_multiple_runs2(binary_name, accuracy, confidence)
-
-    if run_confidence >= 95:    
-      f2 = open(output_dir + "/" + file_name, "w+")
-
-      config_cost, speedup = getConfigCost(layer_costs, config_str)
-      
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\t config_cost=" + str(config_cost) + "\t speedup=" + str(speedup) +   "\n")
-      f2.write(config_str)
-      f2.close()
-
-    flags_str = config_str.replace('\n', ',')
-    conf_result = (run_confidence, avg_accuracy, file_name, flags_str)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-
-
-def compute_promise_confidence3(binary_name, accuracy, confidence, layer_costs,
-                                result_dir, output_dir, knobs_speedup):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("promise_flags", "w+")
-
-    config_flags = []
-    config_str = ""
-    it = 0
-    for x in f:
-
-      if it > 0:
-        config_str += x
-        config_flags.append(int(x.strip()))
-        tuner_file.write(x)    
-      it += 1
-
-    tuner_file.close()
-
-    
-    #run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-    run_confidence, avg_accuracy = do_multiple_runs2(binary_name, accuracy, confidence)
-
-    if run_confidence >= 95:    
-      f2 = open(output_dir + "/" + file_name, "w+")
-
-      config_cost, speedup = getConfigCost2(layer_costs, knobs_speedup, config_flags)
-      
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\t config_cost=" + str(config_cost) + "\t speedup=" + str(speedup) +   "\n")
-      f2.write(config_str)
-      f2.close()
-
-    flags_str = config_str.replace('\n', ',')
-    conf_result = (run_confidence, avg_accuracy, file_name, flags_str)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-
-def dump_promise_confidence_files(binary, result_dir, layer_file_path,
-                                  num_flags, accuracy, confidence):
-
-  #result_dir = args.result_dir
-  output_dir = result_dir + "/high_confidence"
-  input_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)    
-
-  layer_sizes = processLayerFile(layer_file_path)
-  print layer_sizes
-  sleep(2)
-    
-  confidence_list = compute_promise_confidence(binary, accuracy, confidence, input_dir, output_dir)
-  print confidence_list
-
-  # Ascending sort on accuracy
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[1])
-   
-  promise_file = open(output_dir + "/promise_confs.txt", "w+")
-  confidence_file = open(output_dir + "/confidence_summary.txt", "w+")
-
-  max_configs = 50
-  it_count = 0
-  for x in sorted_list:
-    if x[1] > accuracy and x[0] > confidence:
-      config_str = getLayerConfigStr(x[3], layer_sizes, num_flags)
-      promise_file.write(config_str + "\n")
-      it_count += 1
-      if it_count > max_configs:
-        break
-       
-    confidence_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[3]) + "\n")    
-    
-  promise_file.close()
-  confidence_file.close()
-  
-  print "Dumped Confidence Summary"
-
-  
-  
-
-
-def dump_promise_confidence_files2(binary, result_dir, layer_file_path,
-                                   num_flags, accuracy, layer_costs, confidence):
-
-
-  #result_dir = args.result_dir
-  output_dir = result_dir + "/high_confidence"
-  input_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)    
-
-  layer_sizes = processLayerFile(layer_file_path)
-  print layer_sizes
-  sleep(2)
-    
-  confidence_list = compute_promise_confidence2(binary, accuracy, confidence, layer_costs, input_dir, output_dir)
-  print confidence_list
-
-  # Ascending sort on accuracy
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[1])
-   
-  promise_file = open(output_dir + "/promise_confs.txt", "w+")
-  confidence_file = open(output_dir + "/confidence_summary.txt", "w+")
-
-  max_configs = 50
-  it_count = 0
-  for x in sorted_list:
-    if x[1] > accuracy and x[0] > confidence:
-      config_str = getLayerConfigStr(x[3], layer_sizes, num_flags)
-      promise_file.write(config_str + "\n")
-      it_count += 1
-      if it_count > max_configs:
-        break
-       
-    confidence_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[3]) + "\n")    
-    
-  promise_file.close()
-  confidence_file.close()
-  
-  print "Dumped Confidence Summary"
-
-
-
-
-def dump_promise_confidence_files3(binary, input_dir, output_dir, layer_file_path,
-                                   num_flags, accuracy, layer_costs, confidence):
-
-
-  #result_dir = args.result_dir
-  #output_dir = result_dir + "/high_confidence"
-  #input_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)    
-
-  layer_sizes = processLayerFile(layer_file_path)
-  print layer_sizes
-  sleep(2)
-    
-  confidence_list = compute_promise_confidence2(binary, accuracy, confidence, layer_costs, input_dir, output_dir)
-  print confidence_list
-
-  # Ascending sort on accuracy
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[1])
-   
-  promise_file = open(output_dir + "/promise_confs.txt", "w+")
-  confidence_file = open(output_dir + "/confidence_summary.txt", "w+")
-
-  max_configs = 50
-  it_count = 0
-  for x in sorted_list:
-    if x[1] > accuracy and x[0] > confidence:
-      config_str = getLayerConfigStr(x[3], layer_sizes, num_flags)
-      promise_file.write(config_str + "\n")
-      it_count += 1
-      if it_count > max_configs:
-        break
-       
-    confidence_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[3]) + "\n")    
-    
-  promise_file.close()
-  confidence_file.close()
-  
-  print "Dumped Confidence Summary"
-
-
-
-
-
-def dump_promise_confidence_files4(binary, input_dir, output_dir, layer_file_path,
-                                   num_flags, accuracy, layer_costs, confidence, knobs_speedup):
-
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)    
-
-  layer_sizes = processLayerFile(layer_file_path)
-  print layer_sizes
-  sleep(2)
-    
-  confidence_list = compute_promise_confidence3(binary, accuracy, confidence, layer_costs, input_dir, output_dir, knobs_speedup)
-  print confidence_list
-
-  # Ascending sort on accuracy
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[1])
-   
-  promise_file = open(output_dir + "/promise_confs.txt", "w+")
-  confidence_file = open(output_dir + "/confidence_summary.txt", "w+")
-
-  max_configs = 50
-  it_count = 0
-  for x in sorted_list:
-    if x[1] > accuracy and x[0] > confidence:
-      config_str = getLayerConfigStr(x[3], layer_sizes, num_flags)
-      promise_file.write(config_str + "\n")
-      it_count += 1
-      if it_count > max_configs:
-        break
-       
-    confidence_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[3]) + "\n")    
-    
-  promise_file.close()
-  confidence_file.close()
-  
-  print "Dumped Confidence Summary"
-
-
-
-  
-
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-  argparser.add_argument('--output-dir', help='Directory for storing output directory')
-  argparser.add_argument('--binary', help='Binary name to run')
-  argparser.add_argument('--accuracy', type=float,  help='Accuracy constraint')
-  argparser.add_argument('--confidence', type=float, help='Confidence threshold')
-  
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-  output_dir = args.output_dir
-  binary = args.binary
-  accuracy = args.accuracy
-  confidence = args.confidence
-
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  #print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/pareto_curve.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/pareto_curve.py
deleted file mode 100644
index db8233994b855317095c94331fba869d9ad79d16..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/pareto_curve.py
+++ /dev/null
@@ -1,264 +0,0 @@
-
-
-import os
-import shutil
-from measure_confidence2 import getConfigCost
-
-
-AL_THRESHOLD = 0.1
-  
-
-class Config:
-  def __init__(self):
-    self.avg_accuracy = 0
-    self.avg_loss = 0
-    self.speedup = 1
-    self.fname = ""
-    self.flags = []
-
-
-
-
-def skipFile(fname):
-
-  skip_files = {}
-  skip_files["confidence_summary.txt"] = 1
-  skip_files["promise_confs.txt"] = 1
-
-  if "accuracy" in fname:
-    return True
-
-  if fname in skip_files:
-    return True
-  else:
-    return False
-    
-
-  
-    
-def loadConfigData(result_dir, layer_costs, baseline_accuracy):
-
-  config_arr = []
-  
-  #result_dir += "/promise_tuner/high_confidence/"
-  file_names = os.listdir(result_dir)
-
-  
-  for fname in file_names:
-    if not skipFile(fname):
-
-      fpath = result_dir + fname  
-      config = Config()
-      f = open(fpath, "r")
-
-      config_str = f.read()
-      cost, speedup = getConfigCost(layer_costs, config_str)
-
-      config.speedup = speedup
-      config.fname = fname
-
-      fpath2 = fpath + "_accuracy"
-      f2 = open(fpath2, "r")
-      acc_str = f2.read().strip()
-      accuracy = float(acc_str)
-      
-      config.avg_accuracy = accuracy
-      config.avg_loss = baseline_accuracy - accuracy
-   
-      config_arr.append(config)
-        
-
-  return config_arr      
-
-    
-
-
-class Configuration:
-    def __init__(self, name, speedup, energy, accuracy, accuracy_loss):
-        self.name = name
-        self.speedup = speedup
-        self.energy = energy
-        self.accuracy = accuracy
-        self.accuracy_loss = accuracy_loss
-    def __repr__(self):
-        return repr((self.name, self.speedup, self.energy, self.accuracy, self.accuracy_loss))
-
-configuration_objects = [
-    Configuration('conf1', 1.05, 15, 85, 1.2),
-    Configuration('conf2', 2.51, 12, 83, 1.4),
-    Configuration('conf3', 2.05, 10, 84, 0.8),
-]
-
-def compute_pareto_points(configurations):
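-    # Bucket configurations into accuracy-loss bins of width AL_THRESHOLD
-    # (after sorting by loss); per bin, keep the best-speedup and
-    # best-energy points unless the previous bin's point already matches
-    # or beats them.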
-    speedupconfigurations = []
-    energyconfigurations = []
-    #sort configurations based on speedup
-    sorted_configurations = sorted(configurations, key=lambda conf: conf.accuracy_loss)
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # find end_idx
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD) :
-            end_idx += 1
-        # find best speedup and energy in this accuracy loss level
-        sp = -1.0
-        sp_idx = 0
-        en = -1.0
-        en_idx = 0
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup > sp:
-                sp = sorted_configurations[i].speedup
-                sp_idx = i
-            if sorted_configurations[i].energy > en:
-                en = sorted_configurations[i].energy
-                en_idx = i
-        sp_not_dominated = True
-        # if not empty list of configurations
-        if speedupconfigurations:
-            if speedupconfigurations[-1].speedup >= sp:
-                sp_not_dominated = False
-        en_not_dominated = True
-        # if not empty list of configurations
-        if energyconfigurations:
-            if energyconfigurations[-1].energy >= en:
-                en_not_dominated = False
-        if sp_not_dominated:
-            speedupconfigurations.append(sorted_configurations[sp_idx])
-        if en_not_dominated:
-            energyconfigurations.append(sorted_configurations[en_idx])
-        # outer while loop variable increment
-        start_idx = end_idx
-    return [speedupconfigurations, energyconfigurations]
-
-
-def compute_pareto_points_with_margin(configurations, speedup_band_width, energy_band_width):
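-    # Same binning as compute_pareto_points, followed by a second pass
-    # that widens each curve: any configuration within speedup_band_width
-    # (or energy_band_width) of its bin's curve point is also admitted.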
-    speedupconfigurations = []
-    energyconfigurations = []
-    #sort configurations based on speedup
-    sorted_configurations = sorted(configurations, key=lambda conf: conf.accuracy_loss)
-
-    idx_to_sp_conf_dict = {}
-    idx_to_en_conf_dict = {}
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # find end_idx
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD) :
-            end_idx += 1
-        # find best speedup and energy in this accuracy loss level
-        sp = -1.0
-        sp_idx = 0
-        en = float('inf')  # energy is minimized here, unlike speedup
-        en_idx = 0
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup > sp:
-                sp = sorted_configurations[i].speedup
-                sp_idx = i
-            if sorted_configurations[i].energy < en:
-                en = sorted_configurations[i].energy
-                en_idx = i
-        sp_not_dominated = True
-        # if not empty list of configurations
-        if speedupconfigurations:
-            if speedupconfigurations[-1].speedup >= sp:
-                sp_not_dominated = False
-        en_not_dominated = True
-        # if not empty list of configurations
-        if energyconfigurations:
-            if energyconfigurations[-1].energy >= en:
-                en_not_dominated = False
-        if sp_not_dominated:
-            speedupconfigurations.append(sorted_configurations[sp_idx])
-        idx_to_sp_conf_dict[start_idx] = len(speedupconfigurations)-1
-        if en_not_dominated:
-            energyconfigurations.append(sorted_configurations[en_idx])
-        idx_to_en_conf_dict[start_idx] = len(energyconfigurations)-1
-        # outer while loop variable increment
-        start_idx = end_idx
-
-    # We want to add configurations in a band of a certain width around the
-    # curves. This is not possible to do during construction, because the
-    # quality of the curve would deteriorate quickly.
-
-    AdjustedSpeedupCurve = []
-    AdjustedEnergyCurve = []
-
-    start_idx = 0
-    while start_idx < len(sorted_configurations):
-        end_idx = start_idx + 1
-        # find end_idx
-        while end_idx < len(sorted_configurations) and (sorted_configurations[end_idx].accuracy_loss - sorted_configurations[start_idx].accuracy_loss < AL_THRESHOLD) :
-            end_idx += 1
-        for i in range(start_idx, end_idx):
-            if sorted_configurations[i].speedup + speedup_band_width >= speedupconfigurations[idx_to_sp_conf_dict[start_idx]].speedup:
-                AdjustedSpeedupCurve.append(sorted_configurations[i])
-            if sorted_configurations[i].energy + energy_band_width >= energyconfigurations[idx_to_en_conf_dict[start_idx]].energy:
-                AdjustedEnergyCurve.append(sorted_configurations[i])
-        # outer while loop variable increment
-        start_idx = end_idx
-
-    return [AdjustedSpeedupCurve, AdjustedEnergyCurve]
-
-
-
-def findParetoConfigs(base_dir, layer_costs, accuracy):
-
-  result_dir = base_dir + "/pareto/"
-  try:
-      os.mkdir(result_dir)
-  except:
-      print "could not create dir"
-
-  input_dir = base_dir + "/full_results/"    
-  #result_dir = "../build_tuner/tuner_results/alexnet_cifar10/loss_3/batch15"
-  config_arr = loadConfigData(input_dir, layer_costs, accuracy)
-
-  config_list = []
-
-  it = 0
-  for config in config_arr:
-    config = Configuration(config.fname , config.speedup, 100, config.avg_accuracy, config.avg_loss)
-    config_list.append(config)
-
-  
-  SPEEDUP_BAND_SIZE = 1.0
-  ENERGY_BAND_SIZE = 10
-
-  # No Pareto Selection if list is < 50 configurations
-  if len(config_list) < 50:
-    SPEEDUP_BAND_SIZE = 100 # Include all in Pareto Frontier
-    
-
-  print ("*SPEEDUP_BAND_SIZE = ", SPEEDUP_BAND_SIZE)
-  
-  ASC, AEC = compute_pareto_points_with_margin(config_list, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
-
-  
-  print ("len(config_list) = ", len(config_list))
-  print ("len(ASC) = ", len(ASC))
-
-  #print (ASC)
-  #print (config_list)
-
-  for conf in ASC:
-    #dst_path = conf.name.replace("full_results", "pareto")
-    src_path = base_dir + "/full_results/" + conf.name
-    dst_path = base_dir + "/pareto/" + conf.name
-    shutil.copy(src_path, dst_path)
-    
-  
-
-if __name__ == "__main__":
-
-  findParetoConfigs("", [], 0)  # smoke test with placeholder arguments
-  
-  #SC, EC = compute_pareto_points(configuration_objects)
-  #ASC, AEC = compute_pareto_points_with_margin(configuration_objects, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
-
-  #print(SC)
-  #print(EC)
-
-  #print(ASC)
-  #print(AEC)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner2.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner2.py
deleted file mode 100644
index ca96ff16c2d176b3bb91e213005202634916fc41..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner2.py
+++ /dev/null
@@ -1,220 +0,0 @@
-#!/usr/bin/env python
-#
-# Autotunes per-layer error knobs ("promise_flags") for an HPVM tensor
-# runtime binary; adapted from the OpenTuner mmm_block tutorial skeleton
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_promise_confidence_files
-from select_top_results import select_top_results
-from time import sleep
-
-
-layer_file = ""
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-evaluated_configs = {}
-orig_result_dir = ""
-gpu_layers = 0
-
-
-def getAccuracy(file_name):
-
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  # Guard the parse: a malformed accuracy file yields a sentinel value of 20
-  try:
-    accuracy = float(acc_str)
-  except:
-    return 20
-
-  print accuracy
-  return accuracy
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-
-    # NOTE: the first 'gpu_layers' layers run on the GPU and only search
-    # knobs 8 and 9
-    for flag in tuning_flags[:gpu_layers]:
-      manipulator.add_parameter(EnumParameter(flag, [8, 9]))
-
-    for flag in tuning_flags[gpu_layers:]:
-      # default is needed; optimizations don't work without it (tried and tested)
-      manipulator.add_parameter(EnumParameter(flag, flag_ranges))
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-
-    """
-    Run  a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("promise_flags", cfg)
-    
-    run_cmd = binary_name
-    print "binary_name = ", run_cmd
-    #run_result_call_program = self.call_program(run_cmd)
-    #print "returned \n\n"
-
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen(run_cmd, stdout = FNULL)
-    p.wait()
-
-       
-    accuracy = getAccuracy("final_accuracy")
-    total_comps = abs(accuracy_threshold - accuracy)
-    
-    
-    # Use a lowercase name to avoid shadowing the imported Result class
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      #if accuracy not in evaluated_configs:
-      config_tuple = (total_comps, accuracy, cfg)
-      self.configs_list.append(config_tuple)
-      evaluated_configs[accuracy] = 1
-      shutil.copy('promise_flags', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-        
-    print "done with one run"
-    
-    return result
-
-
-  def save_final_config(self, configuration):
-    """
-    Called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-
-    print "Dumping High Confidence results \n"
-    sleep(20)
-
-    # Only dump files with 95% confidence
-    dump_promise_confidence_files(binary_name, orig_result_dir, layer_file, num_flags, accuracy_threshold, 95)
-    #select_top_results(orig_result_dir + "/high_confidence")
-
-    print "Final configuration", configuration.data
-
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune')
-  argparser.add_argument('--start-range', type=int, help='start range in tuning') 
-  argparser.add_argument('--error-range', type=int, help='range of error values used in tuning')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-  argparser.add_argument('--layer-file', help='layer description')
-  argparser.add_argument('--gpu-layers', type=int, help='first N layers to run on GPU')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  start_range = int(args.start_range)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if not result_dir:
-    print("Provide --result-dir ")
-    sys.exit(1)
-    
-  gpu_layers = args.gpu_layers     
-
-    
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  #for j in range(error_range):
-  #  flag_ranges.append(j)
-
-  for j in range(start_range, error_range):
-    flag_ranges.append(j)
-    
-  
-  print("flag_ranges = ", flag_ranges)
-
-  # File with layer description
-  layer_file = args.layer_file
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(argparser.parse_args())
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner3.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner3.py
deleted file mode 100644
index 04ce0d6158819d5cb014411456e1a985fb17b354..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner3.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#!/usr/bin/env python
-#
-# Autotunes per-layer error knobs ("promise_flags") for an HPVM tensor
-# runtime binary, guided by a per-layer cost file; adapted from the
-# OpenTuner mmm_block tutorial skeleton
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_promise_confidence_files3
-from measure_confidence2 import getConfidence, getMinAccuracy
-from select_top_results import select_top_results
-from time import sleep
-from pareto_curve import findParetoConfigs
-
-
-layer_file = ""
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-evaluated_configs = {}
-orig_result_dir = ""
-gpu_layers = 0
-
-test_id = 0
-
-layer_costs = []
-
-
-def readCostFile(file_path):
-
-  f = open(file_path)
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-
-  print ("len(layer_costs) = ", layer_costs)
-  f.close()
-  
-  
-
-def getAccuracy(file_name):
-
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  # Guard the parse: a malformed accuracy file yields a sentinel value of 20
-  try:
-    accuracy = float(acc_str)
-  except:
-    return 20
-
-  print accuracy
-  return accuracy
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-  
-def getConfigCost(cfg):
-
-  it = 0
-  total_cost = 0.0
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    if flag_value == 11:
-      total_cost += layer_costs[it]
-    elif flag_value == 10:
-      total_cost += (layer_costs[it] / 1.3)
-    elif flag_value == 8 or flag_value == 9:
-      total_cost += (layer_costs[it] / 1.6)
-    elif flag_value < 8:
-      divisor = 5 + (7 - flag_value)
-      total_cost += (layer_costs[it] / divisor)
-      
-    it += 1
-    
-  return total_cost
-  
-
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-
-    flags_arr = []
-    for i in range (8, error_range):
-      flags_arr.append(i)
-      
-    # NOTE: Skipping first 'gpu_layers' to run on GPU
-    for flag in tuning_flags[:gpu_layers]:
-      manipulator.add_parameter(
-        EnumParameter(flag, flags_arr))
-
-    ind = gpu_layers  
-    for flag in tuning_flags[gpu_layers:]:
-      if ind in skip_layers:
-        # Layers in skip_layers only search knobs >= 8 (flags_arr)
-        manipulator.add_parameter(EnumParameter(flag, flags_arr))
-        print ("8 ..... 11")
-      else:
-        # default is needed; optimizations don't work without it (tried and tested)
-        manipulator.add_parameter(EnumParameter(flag, flag_ranges))
-        print ("1 .... 11")
-      ind += 1  
-
-      
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-    
-    """
-    Run  a given configuration then
-    return performance
-    """
-    global test_id
-    
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("promise_flags", cfg)
-    
-    run_cmd = binary_name
-    print "\nbinary_name = ", run_cmd
-    #run_result_call_program = self.call_program(run_cmd)
-
-
-    total_runs = 2
-    FNULL = open(os.devnull, 'wb')
-    #p = subprocess.Popen(run_cmd, stdout = FNULL)
-    p = subprocess.Popen([run_cmd, str(total_runs)], stdout = FNULL)
-    p.wait()
-
-       
-    accuracy = getAccuracy("final_accuracy")
-
-    # Get Confidence for multiple runs
-    conf, avg_acc = getConfidence("run_accuracies.txt", accuracy_threshold)  
-    
-    # getConfigCost returns the cost associated with the selected configuration
-    total_comps = getConfigCost(cfg)
-   
-    
-    # Use a lowercase name to avoid shadowing the imported Result class
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    min_accuracy = getMinAccuracy("run_accuracies.txt")
-    print ("min_accuracy = ", min_accuracy)
-    result.accuracy = min_accuracy
-    
-    # Only pass conf if conf == 100
-    if min_accuracy > accuracy_threshold and conf == 100:
-      print ("conf = ", conf, " avg_acc = ", avg_acc)
-      #if accuracy not in evaluated_configs:
-      config_tuple = (total_comps, accuracy, cfg)
-      self.configs_list.append(config_tuple)
-      evaluated_configs[accuracy] = 1
-      shutil.copy('promise_flags', output_dir + '/' + binary_name + '_' + str(test_id))
-
-      f_acc = open(output_dir + '/' + binary_name + '_' + str(test_id) + "_accuracy", "w")
-      f_acc.write(str(accuracy))
-      f_acc.close()
-                   
-      
-    test_id += 1
-    
-    return result
-
-
-  def save_final_config(self, configuration):
-    """
-    Called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-
-    print "Dumping High Confidence results \n"
-    sleep(2)
-
-    # Prune to the Pareto frontier first, then re-validate those configs
-    findParetoConfigs(orig_result_dir, layer_costs, accuracy_threshold)
-
-    input_dir = orig_result_dir + "/pareto/"
-    output_dir = orig_result_dir + "/high_confidence/"
-
-    # Only dump files with 95% confidence
-    dump_promise_confidence_files3(binary_name, input_dir, output_dir, layer_file, num_flags, accuracy_threshold, layer_costs, 95)
-    #select_top_results(orig_result_dir + "/high_confidence")
-
-    print "Final configuration", configuration.data
-
-  
-
-error_range = 11
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune')
-  argparser.add_argument('--start-range', type=int, help='start range in tuning') 
-  argparser.add_argument('--error-range', type=int, help='range of error values used in tuning')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-  argparser.add_argument('--layer-file', help='layer description')
-  argparser.add_argument('--cost-file', help='layer description')
-  argparser.add_argument('--gpu-layers', type=int, help='first N layers to run on GPU')
-  argparser.add_argument('--skip-layers', help='layer IDs to run on GPU')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  start_range = int(args.start_range)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if not result_dir:
-    print("Provide --result-dir ")
-    sys.exit(1)
-    
-  gpu_layers = args.gpu_layers
-  skip_layers_str = args.skip_layers
-
-  skip_layers = []
-  layer_ids = skip_layers_str.split("_")
-  for layer_id in layer_ids:
-    skip_layers.append(int(layer_id))
-
-  print ("skip_layers = ", skip_layers)
-
-  # NOTE: Reading the cost file (with No of ops) to better guide the Autotuner
-  cost_file_path = args.cost_file
-  readCostFile(cost_file_path)
-  
-    
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  #for j in range(error_range):
-  #  flag_ranges.append(j)
-
-  for j in range(start_range, error_range):
-    flag_ranges.append(j)
-    
-  
-  print("flag_ranges = ", flag_ranges)
-
-  # File with layer description
-  layer_file = args.layer_file
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(argparser.parse_args())
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner_piped.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner_piped.py
deleted file mode 100644
index cf84c503b09b6b74474cd4730d93aabd34b5ee2a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/promise_tuner_piped.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/env python
-#
-# Autotunes per-layer error knobs ("opentuner_flags") for a persistent
-# HPVM tensor binary driven over a named pipe; adapted from the
-# OpenTuner mmm_block tutorial skeleton
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence import dump_promise_confidence_files
-from select_top_results import select_top_results
-from time import sleep
-
-
-layer_file = ""
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-evaluated_configs = {}
-orig_result_dir = ""
-
-
-def getAccuracy(file_name):
-
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  # Guard the parse: a malformed accuracy file yields a sentinel value of 20
-  try:
-    accuracy = float(acc_str)
-  except:
-    return 20
-
-  print accuracy
-  return accuracy
-
-
-
-def kill(proc_pid):
-  process = psutil.Process(proc_pid)
-  for proc in process.children(recursive=True):
-    proc.kill()
-  process.kill()
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
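-    # Launch the DNN binary once up front; each tuning iteration is then
-    # triggered over the /tmp/myfifo named pipe instead of paying a fresh
-    # process launch per measured configuration.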
-    FNULL = open(os.devnull, 'wb')
-    # Launch the DNN binary once; individual runs are triggered over the FIFO
-    self.start_process = subprocess.Popen([binary_name, "opentuner_run"], stdout=FNULL)
-
-    try:
-      os.mkfifo("/tmp/myfifo")
-    except OSError:
-      print("FIFO exists")
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      # A default value is needed; the optimizations do not work without it (tried and tested)
-      manipulator.add_parameter(EnumParameter(flag, flag_ranges))
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-    """
-    Run a given configuration, then return its performance
-    """
-    cfg = desired_result.configuration.data
-
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-
-    run_cmd = binary_name
-    print(run_cmd)
-
-    # Use the named pipe to signal the resident DNN process to start one run
-    fifo = open("/tmp/myfifo", "w")
-    fifo.write("start_run")
-    fifo.close()
-
-    print("Waiting for the process to signal back when one run is done")
-
-    fifo2 = open("/tmp/myfifo", "r")
-    fifo2.read()
-    fifo2.close()
-
-    print("Process signalled back")
-
-    accuracy = getAccuracy("final_accuracy")
-    total_comps = abs(accuracy_threshold - accuracy)
-
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      config_tuple = (total_comps, accuracy, cfg)
-      self.configs_list.append(config_tuple)
-      evaluated_configs[accuracy] = 1
-      shutil.copy('opentuner_flags', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-    print("done with one run")
-
-    return result
-
-
-  def save_final_config(self, configuration):
-    """
-    Called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print("Dumping high-confidence results\n")
-    sleep(20)
-
-    # Only dump configurations whose accuracy holds with 95% confidence
-    dump_promise_confidence_files(binary_name, orig_result_dir, layer_file, num_flags, accuracy_threshold, 95)
-
-    kill(self.start_process.pid)
-
-    print("Final configuration", configuration.data)
-
-    return
-
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune')
-  argparser.add_argument('--error-range', type=int, help='range of error values used in tuning') 
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-  argparser.add_argument('--layer-file', help='layer description')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if result_dir == "":
-    print("Provide --result-dir ")
-    sys.exit(1)
-
-  output_dir = result_dir + "/full_results"
-  print(output_dir)
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-
-  # File with layer description
-  layer_file = args.layer_file
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)
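
Note on the protocol above: promise_tuner_piped.py keeps one DNN process resident and drives every measurement through /tmp/myfifo. It writes "start_run", blocks on a read of the same pipe until the binary signals completion, and then parses the "final_accuracy" file. A minimal sketch of the binary-side loop this protocol assumes (Python 3; run_one_config is a hypothetical stand-in for the actual DNN execution, not a function in the original tree):

    def serve_runs():
        # Counterpart of ClangFlagsTuner.run(): block until the tuner signals a run
        while True:
            with open("/tmp/myfifo", "r") as fifo:
                if fifo.read() != "start_run":
                    continue
            # One knob value per line, as written by createFlagsFile()
            flags = [int(line) for line in open("opentuner_flags")]
            accuracy = run_one_config(flags)  # hypothetical: run the DNN under these flags
            with open("final_accuracy", "w") as f:
                f.write(str(accuracy))
            # Signal the tuner that this run is done
            with open("/tmp/myfifo", "w") as fifo:
                fifo.write("done")
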
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/psnr_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/psnr_tuner.py
deleted file mode 100644
index 7d9f601819f985877b6acdd4f1a5ff71f6b74113..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/psnr_tuner.py
+++ /dev/null
@@ -1,323 +0,0 @@
-#!/usr/bin/env python
-#
-# Algorithmic Approximation Tuning
-# Purpose: Tunes for Perforation, Sampling, Numerical Precision (FP16)
-
-
-import adddeps  
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence2 import dump_promise_confidence_files3
-from measure_confidence2 import getConfidence, getMinAccuracy
-from select_top_results import select_top_results
-from time import sleep
-from pareto_curve import findParetoConfigs
-
-
-
-
-class TunerData:
-  """Holds the shared state for one autotuning session"""
-  def __init__(self):
-    self.binary_path = ""
-    self.output_dir = ""
-    self.num_layers = 0
-    self.knobs_list = []
-    self.knobs_speedup = {}
-    self.accuracy_threshold = 0
-    self.test_id = 0
-    self.layer_costs = []
-    self.tuning_flags = []
-    self.autotuner_runs = 0
-    
-    
-
-
-tunerData = TunerData()
-
-
-
-
-def readCostFile(file_path):
-
-  layer_costs = []
-  f = open(file_path)
-  for x in f:
-    cost = float(x.strip())
-    layer_costs.append(cost)
-  f.close()
-
-  print("len(layer_costs) = ", len(layer_costs))
-
-  return layer_costs
-
-  
-
-def getPSNR_violation(file_name):
-
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  try:
-    violation = float(acc_str)
-  except ValueError:
-    # Could not parse the file; fall back to a default of 20 (above the 5% threshold)
-    return 20
-
-  print(violation)
-  return violation
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  # Write one knob value per line; the runtime reads this file before each run
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for i in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[i]
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-
-  f.write(cmd_config)
-  f.close()
-
-  
-
-def readLayerKnobs(file_path):
-
-  f = open(file_path, "r")
-  knobs_list = []
-  for x in f:
-    knobs = []
-    vals = x.split(",")
-    for val in vals:
-      knobs.append(int(val))
-    knobs_list.append(knobs)
-  f.close()
-
-  print("knobs_list = ", knobs_list)
-
-  return knobs_list
-
-
-
-def readKnobConfig(file_path):
-
-  knobs_speedup = {}
-  f = open(file_path, "r")
-  for x in f:
-    toks = x.split("\t")
-    # First tab field is "<kind>,<ID>"; third field is the knob's speedup
-    ID = int(toks[0].split(",")[1])
-    speedup = float(toks[2])
-    knobs_speedup[ID] = speedup
-  f.close()
-
-  print("knobs_speedup = ", knobs_speedup)
-
-  return knobs_speedup
-
-
-
-
-def getConfigCost(cfg):
-
-  orig_cost = 0.0
-  total_cost = 0.0
-  for it in range(tunerData.num_layers):
-    flag = tunerData.tuning_flags[it]
-    flag_value = cfg[flag]
-    op_cost = tunerData.layer_costs[it]
-    speedup = tunerData.knobs_speedup[flag_value]
-
-    # Estimated cost of this layer under the selected knob
-    total_cost += op_cost / speedup
-    orig_cost += op_cost
-
-  # Predicted end-to-end speedup relative to the unapproximated baseline
-  speedup = orig_cost / total_cost
-
-  return total_cost, speedup
-
-
-
-def appendTopLine(f_path, accuracy, total_runs, total_comps, speedup):
-
-  # Prepend a summary line (accuracy, cost, speedup) to the dumped config file
-  f_str = open(f_path, "r").read()
-
-  f_out = open(f_path, "w+")
-  f_out.write("avg_accuracy=" + str(accuracy) + "\tconfig_cost=" + str(total_comps) + "\tspeedup=" + str(speedup) + "\n")
-  f_out.write(f_str)
-
-  f_out.close()
-      
-
-  
-def dumpAccuracyFile(accuracy):
-  
-  f_acc = open(tunerData.output_dir + '/' + tunerData.binary_path + '_' + str(tunerData.test_id) + "_accuracy", "w")
-  f_acc.write(str(accuracy))
-  f_acc.close()
- 
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(tunerData.accuracy_threshold)
-    input_manager = FixedInputManager(size=tunerData.num_layers)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-
-    for i in range(tunerData.num_layers):
-      tunerData.tuning_flags.append("flag" + str(i))
-
-    for ind in range(tunerData.num_layers):
-      flag = tunerData.tuning_flags[ind]
-      manipulator.add_parameter(EnumParameter(flag, tunerData.knobs_list[ind]))
-      print("ind = ", ind, " len = ", len(tunerData.knobs_list))
-      print(tunerData.knobs_list[ind])
-
-    return manipulator
-
-  
-  
-  def run(self, desired_result, input, limit):
-    """
-    Run a given configuration, then return its cost as the performance metric
-    """
-    cfg = desired_result.configuration.data
-
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("promise_flags", cfg)
-
-    run_cmd = tunerData.binary_path
-    print("\nbinary_path = ", run_cmd)
-
-    input_size = 5000
-    offset = 5000
-
-    total_runs = 1  # NOTE: a single run is sufficient in the Algorithmic Approximation Tuner
-    FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen([run_cmd, str(total_runs), str(tunerData.accuracy_threshold), str(1), str(input_size), str(offset)], stdout=FNULL)
-    p.wait()
-
-    PSNR_violation = getPSNR_violation("final_accuracy")
-
-    # getConfigCost returns the cost associated with the selected configuration
-    total_comps, speedup = getConfigCost(cfg)
-
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-
-    # NOTE: fixing the violation rate at a 5% threshold
-    if PSNR_violation < 5:
-      config_tuple = (total_comps, PSNR_violation, cfg)
-      self.configs_list.append(config_tuple)
-      f_path = tunerData.output_dir + '/' + tunerData.binary_path + '_' + str(tunerData.test_id)
-      shutil.copy('promise_flags', f_path)
-
-      appendTopLine(f_path, PSNR_violation, total_runs, total_comps, speedup)
-
-    tunerData.test_id += 1
-
-    return result
-
-
-  def save_final_config(self, configuration):
-
-    print("Done with Autotuning Run\n")
-    sleep(2)
-
-    print("Final configuration", configuration.data)
-
-    return
-
-  
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='path to target binary')
-  argparser.add_argument('--num-layers', type=int, help='number of layers to tune')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='result directory')
-  argparser.add_argument('--cost-file', help='per-layer operation cost file')
-  argparser.add_argument('--knobs-config', help='knob settings and ID mapping')
-  argparser.add_argument('--layer-knobs', help='per-layer knobs')
-  
-  
-  args = argparser.parse_args()
-
-  tunerData.binary_path = str(args.binary)
-  tunerData.num_layers = int(args.num_layers)
-  tunerData.accuracy_threshold = float(args.accuracy)
-
-  # NOTE: read the cost file (with the number of ops per layer) to better guide the autotuner
-  cost_file_path = args.cost_file
-  tunerData.layer_costs = readCostFile(cost_file_path)
-  
-  tunerData.knobs_list = readLayerKnobs(args.layer_knobs)
-  tunerData.knobs_speedup = readKnobConfig(args.knobs_config)
-
-  
-  result_dir = args.result_dir
-  if result_dir == "":
-    print("Provide --result-dir ")
-    sys.exit(1)
-
-  tunerData.output_dir = result_dir + "/high_confidence/"
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(tunerData.output_dir):
-    print("Creating output directory = ", tunerData.output_dir)
-    os.mkdir(tunerData.output_dir)
-
-
-    
-  ClangFlagsTuner.main(args)
-
-  
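
The cost model in getConfigCost above is a speedup-weighted sum over layers. A worked sketch, using the alexnet per-layer op costs from data/alexnet/op_cost.txt below and a handful of knob speedups from data/global_knobs.txt (the chosen per-layer knob IDs are illustrative, not a real tuned configuration):

    layer_costs  = [11894784.0, 39321600.0, 21233664.0, 28311552.0, 18874368.0, 20480.0]
    knob_speedup = {11: 1.0, 12: 1.5, 21: 2.25, 23: 1.88}  # subset of global_knobs.txt
    config       = [12, 21, 23, 21, 12, 11]                 # one knob ID per layer (illustrative)

    # Each layer's cost shrinks by its knob's speedup; the ratio of sums is the predicted speedup
    total_cost = sum(cost / knob_speedup[knob] for cost, knob in zip(layer_costs, config))
    print("predicted speedup =", sum(layer_costs) / total_cost)
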
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/select_top_results.py b/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/select_top_results.py
deleted file mode 100644
index 7ee878e5f8f84f3f56ea982c1f933b2c1a5b914b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/autotuner/select_top_results.py
+++ /dev/null
@@ -1,101 +0,0 @@
-
-
-import argparse
-import sys
-import os
-
-
-# Column indices of the log/linear/quad error metrics on each result line
-log_index = 9
-linear_index = 10
-quad_index = 11
-
-# Number of top configurations to keep per metric, and header lines to skip per file
-top_k = 10
-skip_lines = 1
-
-
-def dump_results(sorted_list, k, result_dir, sub_dir):
-
-  ref_dir = result_dir + "/" + sub_dir
-  if not os.path.exists(ref_dir):
-    os.mkdir(ref_dir)
-  
-  for i in range(min(k, len(sorted_list)) ):
-    file_name = sorted_list[i][1]
-    file_name = ref_dir + "/" + file_name + "_rank_" + str(i)
-    f = open(file_name, "w+")
-    f.write(str(sorted_list[i][2]) + "\t")
-    f.write(str(sorted_list[i][3]) + "\t")
-    f.write(str(sorted_list[i][4]) + "\n")
-    f.write(sorted_list[i][0])
-    f.close()
-
-    
-    
-
-def select_top_results(result_dir):
-
-  if not os.path.exists(result_dir):
-    print("Path does not exist: " + result_dir)
-    sys.exit(1)
-
-  file_names = os.listdir(result_dir)
-  print(file_names)
-
-  results_arr = []
-  
-  for file_name in file_names:
-
-    if file_name == "confidence_summary.txt":
-      continue
-    
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-
-    log_result = 0.0
-    linear_result = 0.0
-    quad_result = 0.0
-    file_str = ""
-    
-    index = 0
-    f = open(result_dir + "/" + file_name)
-    for x in f:
-      if index >= skip_lines:
-        words = x.split()
-        log_result += float(words[log_index])
-        linear_result += float(words[linear_index])
-        quad_result += float(words[quad_index])
-        file_str += x 
-
-      index += 1
-
-
-    file_result = (file_str, file_name, log_result, linear_result, quad_result)          
-    results_arr.append(file_result)    
-
-    
-  sorted_list = sorted(results_arr, key = lambda tup: tup[2])
-  dump_results(sorted_list, top_k, result_dir, "log")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[3])
-  dump_results(sorted_list, top_k, result_dir, "linear")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[4])
-  dump_results(sorted_list, top_k, result_dir, "quad")
-
-
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-
-  select_top_results(result_dir)
-  
-
-    
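
select_top_results above accumulates three error metrics per configuration file (the columns indexed by log_index, linear_index and quad_index on each result line) and writes the top-10 files per metric into log/, linear/ and quad/ subdirectories. The ranking itself reduces to one sort per metric; a toy sketch with made-up values:

    # (file_name, log_err, linear_err, quad_err) - toy values only
    results = [("cfg_a", 3.1, 2.0, 5.2), ("cfg_b", 1.4, 2.9, 0.8), ("cfg_c", 2.2, 1.1, 3.0)]
    for idx, sub_dir in ((1, "log"), (2, "linear"), (3, "quad")):
        ranked = sorted(results, key=lambda tup: tup[idx])  # ascending accumulated error
        print(sub_dir, "->", [tup[0] for tup in ranked[:10]])
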
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/knobs.txt
deleted file mode 100644
index 050fc6118045090b4a5cc442105181f56d693a77..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/knobs.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/op_cost.txt
deleted file mode 100644
index 04336fca2708d5e5d78849e1c12014f5ddbd1ad7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet/op_cost.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-11894784.000000
-39321600.000000
-21233664.000000
-28311552.000000
-18874368.000000
-20480.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/knobs.txt
deleted file mode 100644
index c873eeddcdeaa44fe0365bdb5e3292997d0074b6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/knobs.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/op_cost.txt
deleted file mode 100644
index 5a5722f202dde469dca94c71dd9c5fc1cd7aa32b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/alexnet2/op_cost.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-88473.601562
-943718.375000
-471859.187500
-943718.375000
-471859.187500
-943718.375000
-2048.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs.txt
deleted file mode 100644
index 54a09e08530f13f5435cdb6d4cd48a2b67ef2b38..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-fp32,11	-1	1.0	tensorConvolution	tensorConvolution
-fp16,12	-1	1.5	tensorConvolution	tensorHalfConvolution
-perf,21	1,2,0	2.25	tensorConvolution	tensorConvPerfCuda
-perf,22	1,2,1	2.25	tensorConvolution	tensorConvPerfCuda
-perf,23	1,3,0	1.88	tensorConvolution	tensorConvPerfCuda
-perf,24	1,3,1	1.88	tensorConvolution	tensorConvPerfCuda
-perf,25	1,3,2	1.88	tensorConvolution	tensorConvPerfCuda
-perf,26	2,1,0	2.25	tensorConvolution	tensorConvPerfCuda
-perf,27	2,1,1	2.25	tensorConvolution	tensorConvPerfCuda
-perf,28	3,1,0	1.88	tensorConvolution	tensorConvPerfCuda
-perf,29	3,1,1	1.88	tensorConvolution	tensorConvPerfCuda
-perf,30	3,1,2	1.88	tensorConvolution	tensorConvPerfCuda
-samp,31	2,0	2.25	tensorConvolution	tensorConvSampSim
-samp,32	2,1	2.25	tensorConvolution	tensorConvSampSim
-samp,33	4,0	1.8	tensorConvolution	tensorConvSampSim
-samp,34	4,1	1.8	tensorConvolution	tensorConvSampSim
-samp,35	4,2	1.8	tensorConvolution	tensorConvSampSim
-samp,36	4,3	1.8	tensorConvolution	tensorConvSampSim
-reduction_samp,41	1	1.5
-reduction_samp,42	1	2.25
-reduction_samp,43	1	1.4
-reduction_samp,44	1	2
-reduction_samp,45	1	1.25
-reduction_samp,46	1	1.8
-swing_level,1	1	12
-swing_level,2	1	10
-swing_level,3	1	9
-swing_level,4	1	8
-swing_level,5	1	6
-swing_level,6	1	5
-swing_level,7	1	4
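
Each line of global_knobs.txt above is tab-separated: "<kind>,<ID>", the knob's parameters, its measured speedup, and (for the convolution knobs) the original and approximate op names. readKnobConfig in psnr_tuner.py keeps only the ID-to-speedup mapping; the parse it performs amounts to:

    knobs_speedup = {}
    for line in open("global_knobs.txt"):        # the --knobs-config file
        toks = line.split("\t")
        knob_id = int(toks[0].split(",")[1])     # e.g. "perf,21" -> 21
        knobs_speedup[knob_id] = float(toks[2])  # e.g. 2.25
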
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs_dnn.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs_dnn.txt
deleted file mode 100644
index 2180997527410cfdbf577a116fd39a592e2af05b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/global_knobs_dnn.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-fp32,11	-1	1.0	tensorConvolution	tensorConvolution
-fp16,12	-1	1.5	tensorConvolution	tensorHalfConvolution
-perf,21	1,2,0	2.25	tensorConvolution	tensorConvPerfCuda
-perf,22	1,2,1	2.25	tensorConvolution	tensorConvPerfCuda
-perf,23	1,3,0	1.88	tensorConvolution	tensorConvPerfCuda
-perf,24	1,3,1	1.88	tensorConvolution	tensorConvPerfCuda
-perf,25	1,3,2	1.88	tensorConvolution	tensorConvPerfCuda
-perf,26	2,1,0	2.25	tensorConvolution	tensorConvPerfCuda
-perf,27	2,1,1	2.25	tensorConvolution	tensorConvPerfCuda
-perf,28	3,1,0	1.88	tensorConvolution	tensorConvPerfCuda
-perf,29	3,1,1	1.88	tensorConvolution	tensorConvPerfCuda
-perf,30	3,1,2	1.88	tensorConvolution	tensorConvPerfCuda
-samp,31	2,0	2.25	tensorConvolution	tensorConvSampSim
-samp,32	2,1	2.25	tensorConvolution	tensorConvSampSim
-samp,33	4,0	1.8	tensorConvolution	tensorConvSampSim
-samp,34	4,1	1.8	tensorConvolution	tensorConvSampSim
-samp,35	4,2	1.8	tensorConvolution	tensorConvSampSim
-samp,36	4,3	1.8	tensorConvolution	tensorConvSampSim
-swing_level,1	1	12
-swing_level,2	1	10
-swing_level,3	1	9
-swing_level,4	1	8
-swing_level,5	1	6
-swing_level,6	1	5
-swing_level,7	1	4
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/knobs.txt
deleted file mode 100644
index 8973c89f7a89f9c62c12f8371d16eebad7264b31..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/knobs.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/op_cost.txt
deleted file mode 100644
index 74b1b668e2f27f3ddb77dcac7fff9890c70a6f02..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/lenet/op_cost.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-62720.000000
-1003520.000000
-321126.406250
-1024.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/knobs.txt
deleted file mode 100644
index 900ad3944d5203d4552a75140358388c99bea181..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/knobs.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/op_cost.txt
deleted file mode 100644
index 673e704b7e37e19c090e98799189a4411bad9f7c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet/op_cost.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-88473.601562
-29491.199219
-209715.203125
-14745.599609
-209715.203125
-29491.199219
-419430.406250
-7372.799805
-209715.203125
-14745.599609
-419430.406250
-3686.399902
-209715.203125
-7372.799805
-419430.406250
-7372.799805
-419430.406250
-7372.799805
-419430.406250
-7372.799805
-419430.406250
-7372.799805
-419430.406250
-1843.199951
-209715.203125
-3686.399902
-419430.406250
-1024.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/knobs.txt
deleted file mode 100644
index c7273f3fc6e487ada58eaed7bc036f707c3ce541..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/knobs.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/op_cost.txt
deleted file mode 100644
index 7266441905a08c1ef1796dec8ee6c05660998378..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/mobilenet_shallow/op_cost.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-265420.812500
-629145.625000
-629145.625000
-1258291.250000
-629145.625000
-1258291.250000
-629145.625000
-6144.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/knobs.txt
deleted file mode 100644
index eadcb5ebff73feb75b9f7533f7703252ab895afc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/knobs.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/op_cost.txt
deleted file mode 100644
index fdba070cfc5eac559c8384306993fb52a1eb2e04..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/resnet/op_cost.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-44236.800781
-235929.593750
-235929.593750
-235929.593750
-235929.593750
-235929.593750
-235929.593750
-117964.796875
-235929.593750
-13107.200195
-235929.593750
-235929.593750
-235929.593750
-235929.593750
-117964.796875
-235929.593750
-13107.200195
-235929.593750
-235929.593750
-235929.593750
-235929.593750
-64.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/knobs.txt
deleted file mode 100644
index d238fa1036729f79cc66bdaa14667dcf16c60a9a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/knobs.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/op_cost.txt
deleted file mode 100644
index 5f58ebcc043915d28cf874a1f67e5b2637db1dfc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar10/op_cost.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-88473.601562
-1887436.750000
-943718.375000
-1887436.750000
-943718.375000
-1887436.750000
-1887436.750000
-943718.375000
-1887436.750000
-1887436.750000
-471859.187500
-471859.187500
-471859.187500
-13107.200195
-256.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/knobs.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/knobs.txt
deleted file mode 100644
index d238fa1036729f79cc66bdaa14667dcf16c60a9a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/knobs.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36
-12
-12
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/op_cost.txt b/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/op_cost.txt
deleted file mode 100644
index 8c6daad2e2902e3ac821d99ebbe12e21b6428cc7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/data/vgg16_cifar100/op_cost.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-884736.000000
-18874368.000000
-9437184.000000
-18874368.000000
-9437184.000000
-18874368.000000
-18874368.000000
-9437184.000000
-18874368.000000
-18874368.000000
-4718592.000000
-4718592.000000
-4718592.000000
-131072.000000
-25600.000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/debian-packages-deps b/llvm/projects/hpvm-tensor-rt/opentuner/debian-packages-deps
deleted file mode 100644
index ea49289a875cfe80df1de02307e03f7791c00adf..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/debian-packages-deps
+++ /dev/null
@@ -1,9 +0,0 @@
-build-essential
-git
-gnuplot
-libfreetype6-dev
-libpng-dev
-libsqlite3-dev
-python-dev
-python-pip
-sqlite3
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/Makefile b/llvm/projects/hpvm-tensor-rt/opentuner/docs/Makefile
deleted file mode 100644
index 1c028b3a91e5750dc927f6a865923ca2e9ac141a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/Makefile
+++ /dev/null
@@ -1,177 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-PAPER         =
-BUILDDIR      = build
-
-# User-friendly check for sphinx-build
-ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
-endif
-
-# Internal variables.
-PAPEROPT_a4     = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
-	@echo "Please use \`make <target>' where <target> is one of"
-	@echo "  html       to make standalone HTML files"
-	@echo "  dirhtml    to make HTML files named index.html in directories"
-	@echo "  singlehtml to make a single large HTML file"
-	@echo "  pickle     to make pickle files"
-	@echo "  json       to make JSON files"
-	@echo "  htmlhelp   to make HTML files and a HTML help project"
-	@echo "  qthelp     to make HTML files and a qthelp project"
-	@echo "  devhelp    to make HTML files and a Devhelp project"
-	@echo "  epub       to make an epub"
-	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
-	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
-	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
-	@echo "  text       to make text files"
-	@echo "  man        to make manual pages"
-	@echo "  texinfo    to make Texinfo files"
-	@echo "  info       to make Texinfo files and run them through makeinfo"
-	@echo "  gettext    to make PO message catalogs"
-	@echo "  changes    to make an overview of all changed/added/deprecated items"
-	@echo "  xml        to make Docutils-native XML files"
-	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
-	@echo "  linkcheck  to check all external links for integrity"
-	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-
-clean:
-	rm -rf $(BUILDDIR)/*
-
-html:
-	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
-	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
-	@echo
-	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
-	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
-	@echo
-	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
-	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
-	@echo
-	@echo "Build finished; now you can process the pickle files."
-
-json:
-	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
-	@echo
-	@echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
-	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
-	@echo
-	@echo "Build finished; now you can run HTML Help Workshop with the" \
-	      ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
-	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
-	@echo
-	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
-	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
-	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/OpenTuner.qhcp"
-	@echo "To view the help file:"
-	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/OpenTuner.qhc"
-
-devhelp:
-	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
-	@echo
-	@echo "Build finished."
-	@echo "To view the help file:"
-	@echo "# mkdir -p $$HOME/.local/share/devhelp/OpenTuner"
-	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/OpenTuner"
-	@echo "# devhelp"
-
-epub:
-	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
-	@echo
-	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo
-	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
-	@echo "Run \`make' in that directory to run these through (pdf)latex" \
-	      "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through pdflatex..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-latexpdfja:
-	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
-	@echo "Running LaTeX files through platex and dvipdfmx..."
-	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
-	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
-	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
-	@echo
-	@echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
-	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
-	@echo
-	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo
-	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
-	@echo "Run \`make' in that directory to run these through makeinfo" \
-	      "(use \`make info' here to do that automatically)."
-
-info:
-	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
-	@echo "Running Texinfo files through makeinfo..."
-	make -C $(BUILDDIR)/texinfo info
-	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
-	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
-	@echo
-	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
-	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
-	@echo
-	@echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
-	@echo
-	@echo "Link check complete; look for any errors in the above output " \
-	      "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
-	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
-	@echo "Testing of doctests in the sources finished, look at the " \
-	      "results in $(BUILDDIR)/doctest/output.txt."
-
-xml:
-	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
-	@echo
-	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
-
-pseudoxml:
-	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
-	@echo
-	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/rtd-requirements.txt b/llvm/projects/hpvm-tensor-rt/opentuner/docs/rtd-requirements.txt
deleted file mode 100644
index e30d149ed5e3356ff54d915b556dfaed0dfb6148..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/rtd-requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-argparse>=1.2.1
-django==1.6.1
-fn>=0.2.12
-SQLAlchemy>=0.8.2
-virtualenv==1.9.1
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/conf.py b/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/conf.py
deleted file mode 100644
index a27fabf403e0e7f6081d906819167e94eb236b61..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/conf.py
+++ /dev/null
@@ -1,261 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# OpenTuner documentation build configuration file, created by
-# sphinx-quickstart on Sat Jan  3 04:13:12 2015.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('../..'))
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    'sphinx.ext.autodoc',
-    'sphinx.ext.pngmath',
-]
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'OpenTuner'
-copyright = u'2015, Jason Ansel'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = '0.0'
-# The full version, including alpha/beta/rc tags.
-release = '0.0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = []
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-html_theme = 'default'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'OpenTunerdoc'
-
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-  ('index', 'OpenTuner.tex', u'OpenTuner Documentation',
-   u'Jason Ansel', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'opentuner', u'OpenTuner Documentation',
-     [u'Jason Ansel'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-#  dir menu entry, description, category)
-texinfo_documents = [
-  ('index', 'OpenTuner', u'OpenTuner Documentation',
-   u'Jason Ansel', 'OpenTuner', 'One line description of project.',
-   'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/index.rst b/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/index.rst
deleted file mode 100644
index 48f7468982f559d60ef98736539567fa0a320ec3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/index.rst
+++ /dev/null
@@ -1,27 +0,0 @@
-.. OpenTuner documentation master file, created by
-   sphinx-quickstart on Sat Jan  3 04:13:12 2015.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Welcome to OpenTuner's documentation!
-=====================================
-This is still under construction
-
-
-Contents:
-
-.. toctree::
-   :maxdepth: 2
-
-   params
-   techniques
-
-
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/params.rst b/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/params.rst
deleted file mode 100644
index b8d08cd300d466c5be43dabfa0e4db1abd12182b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/params.rst
+++ /dev/null
@@ -1,339 +0,0 @@
-.. currentmodule:: opentuner.search.manipulator
-
-****************
-Parameters
-****************
-
-This is an overview of parameters in OpenTuner.
-
-Each Parameter instance is created with a name. Most methods in parameters operate on configurations, dict-like objects spawned by the ConfigurationManipulator. Configurations contain values corresponding to a collection of instances of named parameters.
-
-A Parameter’s methods may mutate the value in a configuration corresponding to the name of the particular parameter instance. These methods are called operators.
-
-==============================
-Parameter Types and Operators
-==============================
-
-Each parameter has a set of operators. These operators take in a set of parent configurations and mutate the corresponding parameter value in the first configuration according to the parent values. Operators form the set of available transformations for search techniques to generate new configurations to test.
-
-Operator methods can be identified by the prefix 'op#_', where # is the number of required input configurations. The prefix 'opn\_' specifies an arbitrary number of input configurations, as a list. The first argument into an operator is always the configuration that will be mutated. This is followed by the required parent configurations, then any required arguments, and finally optional arguments.
-
-Any operators defined for a Parameter are inherited by its subclasses.
-
------------------
-Parameter
------------------
-This is an abstract base interface for parameters.
-
-.. autoclass:: Parameter
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op3_swarm
-
-	.. automethod:: op4_set_linear
-
-	.. automethod:: opn_stochastic_mix
-
-
--------------------------
-Primitive Parameter
--------------------------
-.. autoclass:: PrimitiveParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Parameter.op1_randomize`,
-	:meth:`Parameter.op3_swarm`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_normal_mutation
-
-	**This paragraph can have examples for the above operator**
-
-	.. automethod:: op4_set_linear
-
-
-------------------------
-Numeric Parameter
-------------------------
-.. autoclass:: NumericParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`Parameter.op3_swarm`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op1_scale
-
-	.. automethod:: op3_difference
-
-	.. automethod:: opn_sum
-
-
-------------------------
-Integer Parameter
-------------------------
-.. autoclass:: IntegerParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-	.. automethod:: op3_swarm
-
-
-------------------------
-Float Parameter
-------------------------
-.. autoclass:: FloatParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-	.. automethod:: op3_swarm
-
-
-------------------------
-ScaledNumericParameter
-------------------------
-.. autoclass:: ScaledNumericParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`Parameter.op3_swarm`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-
-------------------------
-LogIntegerParameter
-------------------------
-.. autoclass:: LogIntegerParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`FloatParameter.op3_swarm`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-
-------------------------
-LogFloatParameter
-------------------------
-.. autoclass:: LogFloatParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`FloatParameter.op3_swarm`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-
-------------------------
-PowerOfTwoParameter
-------------------------
-.. autoclass:: PowerOfTwoParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`PrimitiveParameter.op1_normal_mutation`,
-	:meth:`NumericParameter.op1_randomize`,
-	:meth:`NumericParameter.op1_scale`,
-	:meth:`NumericParameter.op3_difference`,
-	:meth:`IntegerParameter.op3_swarm`,
-	:meth:`PrimitiveParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`,
-	:meth:`NumericParameter.opn_sum`
-
-
-------------------------
-Complex Parameter
-------------------------
-.. autoclass:: ComplexParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Parameter.op3_swarm`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op4_set_linear
-
-
-------------------------
-Boolean Parameter
-------------------------
-.. autoclass:: BooleanParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Parameter.op3_swarm`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_flip
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op3_swarm
-
---------------------------
-Switch Parameter
---------------------------
-.. autoclass:: SwitchParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Parameter.op3_swarm`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
---------------------------
-Enum Parameter
---------------------------
-.. autoclass:: EnumParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Parameter.op3_swarm`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-
---------------------------
-Permutation Parameter
---------------------------
-.. autoclass:: PermutationParameter
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op1_small_random_change
-
-	.. automethod:: op2_random_swap
-
-	.. automethod:: op2_random_invert
-
-	.. automethod:: op3_cross
-
-	.. automethod:: op3_cross_PX
-
-	.. automethod:: op3_cross_PMX
-
-	.. automethod:: op3_cross_CX
-
-	.. automethod:: op3_cross_OX1
-
-	.. automethod:: op3_cross_OX3
-
-	.. automethod:: op3_swarm
-
---------------------------
-Array
---------------------------
-.. autoclass:: Array
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`ComplexParameter.op1_randomize`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op3_cross
-
-	.. automethod:: op3_swarm
-
-
---------------------------
-BooleanArray
---------------------------
-.. autoclass:: BooleanArray
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Array.op3_cross`,
-	:meth:`Array.op3_swarm`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op3_swarm_parallel
-
-
---------------------------
-FloatArray
---------------------------
-.. autoclass:: FloatArray
-	:show-inheritance:
-
-	*Inherited Operators:*
-
-	:meth:`Array.op3_cross`,
-	:meth:`Array.op3_swarm`,
-	:meth:`ComplexParameter.op4_set_linear`,
-	:meth:`Parameter.opn_stochastic_mix`
-
-	.. automethod:: op1_randomize
-
-	.. automethod:: op3_swarm_parallel
-
-
-
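
Of the parameter types documented above, the deleted autotuners in this tree use only EnumParameter: one enum per tunable layer, whose legal values are that layer's knob IDs. A condensed sketch of that pattern, mirroring the manipulator() methods earlier in this diff (build_manipulator is an illustrative helper, not a function from the original tree):

    from opentuner import ConfigurationManipulator, EnumParameter

    def build_manipulator(knobs_list):
        # knobs_list: one list of legal knob IDs per layer, as read from knobs.txt
        manipulator = ConfigurationManipulator()
        for i, knobs in enumerate(knobs_list):
            manipulator.add_parameter(EnumParameter("flag" + str(i), knobs))
        return manipulator
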
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/techniques.rst b/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/techniques.rst
deleted file mode 100644
index 3bbebddedbd9c4b999fa8a4f58244ee482ffe673..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/docs/source/techniques.rst
+++ /dev/null
@@ -1,51 +0,0 @@
-.. currentmodule:: opentuner.search.composableevolutionarytechniques
-
-********************
-Current Techniques
-********************
-
-OpenTuner has a library of existing search techniques.
-
-=================================
-Composable Search Techniques
-=================================
-
-A ComposableEvolutionaryTechnique separates the search technique from the operators it applies, so the two can be composed freely. Creating a ComposableEvolutionaryTechnique requires implementing three methods:
-
- * :meth:`minimum_number_of_parents <ComposableEvolutionaryTechnique.minimum_number_of_parents>`
- * :meth:`get_parents <ComposableEvolutionaryTechnique.get_parents>`
- * :meth:`update_population <ComposableEvolutionaryTechnique.update_population>`
-
-Additionally, the following methods may be overridden for further customization:
-
- * :meth:`make_population_member <ComposableEvolutionaryTechnique.make_population_member>`
- * :meth:`select_parameters <ComposableEvolutionaryTechnique.select_parameters>`
- * :meth:`get_default_operator <ComposableEvolutionaryTechnique.get_default_operator>`
-
-The following methods are useful when choosing parents or updating the population:
-
- * :meth:`lt <ComposableEvolutionaryTechnique.lt>`
- * :meth:`lte <ComposableEvolutionaryTechnique.lte>`
- * :meth:`get_global_best_configuration <ComposableEvolutionaryTechnique.get_global_best_configuration>`
-
-A ComposableEvolutionaryTechnique yields configurations generated by successive iterations of applying operators to the configurations returned by :meth:`get_parents <ComposableEvolutionaryTechnique.get_parents>`, updating the population with each new configuration through :meth:`update_population <ComposableEvolutionaryTechnique.update_population>`. A minimal sketch follows the class reference below.
-
-.. autoclass:: ComposableEvolutionaryTechnique
-
-	.. automethod:: minimum_number_of_parents
-
-	.. automethod:: get_parents
-
-	.. automethod:: update_population
-
-	.. automethod:: make_population_member
-
-	.. automethod:: select_parameters
-
-	.. automethod:: get_default_operator
-
-	.. automethod:: lt
-
-	.. automethod:: lte
-
-	.. automethod:: get_global_best_configuration
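-
-As an illustration, here is a minimal, hypothetical sketch of a greedy
-composable technique built from the three required methods. The exact
-method signatures and the ``config``/``timestamp``/``touch()`` members of
-population members are assumptions made for this sketch, not a reference
-implementation::
-
-	from opentuner.search.composableevolutionarytechniques import (
-	    ComposableEvolutionaryTechnique)
-
-	class GreedySketch(ComposableEvolutionaryTechnique):
-	    def minimum_number_of_parents(self):
-	        # operators applied by this technique receive one parent
-	        return 1
-
-	    def get_parents(self, population):
-	        # always operate on the best configuration seen so far
-	        return [self.get_global_best_configuration()]
-
-	    def update_population(self, config, population):
-	        # replace the oldest member with the newly generated configuration
-	        oldest = min(population, key=lambda member: member.timestamp)
-	        oldest.config = config
-	        oldest.touch()  # assumed helper that refreshes the timestamp
-	        return population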
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/.gitignore
deleted file mode 100644
index f525a6259ba8a55dbb66c2eb9b3489e9784ae523..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-*-journal
-stats
-opentuner.log
-opentuner.db
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/.gitignore
deleted file mode 100644
index fab2c2b13c5afd35380ae5cf8f4317d2acc58a06..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/.gitignore
+++ /dev/null
@@ -1,5 +0,0 @@
-tmp.bin
-cc_flags.json
-gccflags_final_config.cmd
-gccflags_final_config.json
-cc_params.json
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on Windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/.gitignore
deleted file mode 100644
index f06d3e01a2bedbfadb4c05ad181eb8745ac2f608..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-fft.c
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/matrixmultiply.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/matrixmultiply.cpp
deleted file mode 100644
index 9989ffbf4a2ff1f6dffcfbdcab1b7e3f3116c7a1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/matrixmultiply.cpp
+++ /dev/null
@@ -1,80 +0,0 @@
-// based on: http://blogs.msdn.com/b/xiangfan/archive/2009/04/28/optimize-your-code-matrix-multiplication.aspx
-//  by Xiang Fan
-
-#include <algorithm>
-#include <iostream>
-
-#define N 512
-
-
-template<class T>
-T** make_test_matrix() {
-    T** data = new T*[N];
-    for (int i = 0; i < N; i++) {
-        data[i] = new T[N];
-    }
-    for(int i = 0; i < N; i++) {
-        for(int j = 0; j < N; j++) {
-            data[i][j] = (int) i * j;
-        }
-    }
-    return data;
-}
-
-
-
-template<typename T>
-void Transpose(int size, T** __restrict__ m)
-{
-    for (int i = 0; i < size; i++) {
-        for (int j = i + 1; j < size; j++) {
-            std::swap(m[i][j], m[j][i]);
-        }
-    }
-}
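-// SeqMatrixMult3 transposes m2 before multiplying so that the inner loop
-// reads both m1[i][k] and m2[j][k] sequentially in memory; without the
-// transpose, walking m2 column-by-column would touch a new cache line on
-// almost every iteration. The second Transpose call restores m2 for the
-// caller.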
-template<typename T>
-void SeqMatrixMult3(int size, T** __restrict__ m1, T** __restrict__ m2,
-                    T** __restrict__ result) {
-    Transpose(size, m2);
-    for (int i = 0; i < size; i++) {
-        for (int j = 0; j < size; j++) {
-            T c = 0;
-            for (int k = 0; k < size; k++) {
-                c += m1[i][k] * m2[j][k];
-            }
-            result[i][j] = c;
-        }
-    }
-    Transpose(size, m2);
-}
-
-
-template<typename T>
-void test() {
-  T** a = make_test_matrix<T>();
-  T** b = make_test_matrix<T>();
-  T** c = make_test_matrix<T>();
-  SeqMatrixMult3(N, a, b, c);
-
-
-  T avg = 0;
-  for(int i = 0; i < N; i++) {
-      for(int j = 0; j < N; j++) {
-          avg += c[i][j] / (T)(N*N);
-      }
-  }
-  // print out average so caller can check answer
-  std::cout << avg << std::endl;
-}
-
-
-int main(int argc, const char** argv) {
-  test<float>();
-  return 0;
-}
-
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/raytracer.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/raytracer.cpp
deleted file mode 100644
index 3cb1192c6a0d9cbd3502186dc391efa71d5cde18..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/raytracer.cpp
+++ /dev/null
@@ -1,277 +0,0 @@
-/*
-	A very basic raytracer example.
-	Copyright (C) 2012  www.scratchapixel.com
-
-	This program is free software: you can redistribute it and/or modify
-	it under the terms of the GNU General Public License as published by
-	the Free Software Foundation, either version 3 of the License, or
-	(at your option) any later version.
-
-	This program is distributed in the hope that it will be useful,
-	but WITHOUT ANY WARRANTY; without even the implied warranty of
-	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-	GNU General Public License for more details.
-
-	You should have received a copy of the GNU General Public License
-	along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-	- changes 02/04/13: fixed flag in ofstream causing a bug under Windows,
-	added default values for M_PI and INFINITY
-	- changes 24/05/13: small change to way we compute the refraction direction
-	vector (eta=ior if we are inside and 1/ior if we are outside the sphere)
-
-	Compile with the following command: c++ -o raytracer -O3 -Wall raytracer.cpp
-
-*/
-
-#include <cstdlib>
-#include <cstdio>
-#include <cmath>
-#include <fstream>
-#include <vector>
-#include <iostream>
-#include <cassert>
-
-#if defined(__linux__) || defined(__APPLE__)
-	// "Compiled for Linux
-#else
-	// Windows doesn't define these values by default, Linux does
-	#define M_PI 3.141592653589793
-	#define INFINITY 1e8
-#endif
-
-template<typename T>
-class Vec3
-{
-public:
-	T x, y, z;
-	Vec3() : x(T(0)), y(T(0)), z(T(0)) {}
-	Vec3(T xx) : x(xx), y(xx), z(xx) {}
-	Vec3(T xx, T yy, T zz) : x(xx), y(yy), z(zz) {}
-	Vec3& normalize()
-	{
-		T nor2 = length2();
-		if (nor2 > 0) {
-			T invNor = 1 / sqrt(nor2);
-			x *= invNor, y *= invNor, z *= invNor;
-		}
-		return *this;
-	}
-	Vec3<T> operator * (const T &f) const { return Vec3<T>(x * f, y * f, z * f); }
-	Vec3<T> operator * (const Vec3<T> &v) const { return Vec3<T>(x * v.x, y * v.y, z * v.z); }
-	T dot(const Vec3<T> &v) const { return x * v.x + y * v.y + z * v.z; }
-	Vec3<T> operator - (const Vec3<T> &v) const { return Vec3<T>(x - v.x, y - v.y, z - v.z); }
-	Vec3<T> operator + (const Vec3<T> &v) const { return Vec3<T>(x + v.x, y + v.y, z + v.z); }
-	Vec3<T>& operator += (const Vec3<T> &v) { x += v.x, y += v.y, z += v.z; return *this; }
-	Vec3<T>& operator *= (const Vec3<T> &v) { x *= v.x, y *= v.y, z *= v.z; return *this; }
-	Vec3<T> operator - () const { return Vec3<T>(-x, -y, -z); }
-	T length2() const { return x * x + y * y + z * z; }
-	T length() const { return sqrt(length2()); }
-	friend std::ostream & operator << (std::ostream &os, const Vec3<T> &v)
-	{
-		os << "[" << v.x << " " << v.y << " " << v.z << "]";
-		return os;
-	}
-};
-
-template<typename T>
-class Sphere
-{
-public:
-	Vec3<T> center;                         /// position of the sphere
-	T radius, radius2;                      /// sphere radius and radius^2
-	Vec3<T> surfaceColor, emissionColor;    /// surface color and emission (light)
-	T transparency, reflection;             /// surface transparency and reflectivity
-	Sphere(const Vec3<T> &c, const T &r, const Vec3<T> &sc, 
-		const T &refl = 0, const T &transp = 0, const Vec3<T> &ec = 0) : 
-		center(c), radius(r), radius2(r * r), surfaceColor(sc), emissionColor(ec),
-		transparency(transp), reflection(refl)
-	{}
-	// compute a ray-sphere intersection using the geometric solution
-	bool intersect(const Vec3<T> &rayorig, const Vec3<T> &raydir, T *t0 = NULL, T *t1 = NULL) const
-	{
-		Vec3<T> l = center - rayorig;
-		T tca = l.dot(raydir);
-		if (tca < 0) return false;
-		T d2 = l.dot(l) - tca * tca;
-		if (d2 > radius2) return false;
-		T thc = sqrt(radius2 - d2);
-		if (t0 != NULL && t1 != NULL) {
-			*t0 = tca - thc;
-			*t1 = tca + thc;
-		}
-
-		return true;
-	}
-};
-
-#define MAX_RAY_DEPTH 5
-
-template<typename T>
-T mix(const T &a, const T &b, const T &mix)
-{
-	return b * mix + a * (T(1) - mix);
-}
-
-// This is the main trace function. It takes a ray as argument (defined by its origin
-// and direction). We test if this ray intersects any of the geometry in the scene.
-// If the ray intersects an object, we compute the intersection point, the normal
-// at the intersection point, and shade this point using this information.
-// Shading depends on the surface property (is it transparent, reflective, diffuse).
-// The function returns a color for the ray: if the ray intersects an
-// object, the color of the object at the intersection point; otherwise,
-// the background color.
-template<typename T>
-Vec3<T> trace(const Vec3<T> &rayorig, const Vec3<T> &raydir, 
-	const std::vector<Sphere<T> *> &spheres, const int &depth)
-{
-	//if (raydir.length() != 1) std::cerr << "Error " << raydir << std::endl;
-	T tnear = INFINITY;
-	const Sphere<T> *sphere = NULL;
-	// find intersection of this ray with the sphere in the scene
-	for (unsigned i = 0; i < spheres.size(); ++i) {
-		T t0 = INFINITY, t1 = INFINITY;
-		if (spheres[i]->intersect(rayorig, raydir, &t0, &t1)) {
-			if (t0 < 0) t0 = t1;
-			if (t0 < tnear) {
-				tnear = t0;
-				sphere = spheres[i];
-			}
-		}
-	}
-	// if there's no intersection return black or background color
-	if (!sphere) return Vec3<T>(2);
-	Vec3<T> surfaceColor = 0; // color of the ray/surface of the object intersected by the ray
-	Vec3<T> phit = rayorig + raydir * tnear; // point of intersection
-	Vec3<T> nhit = phit - sphere->center; // normal at the intersection point
-	nhit.normalize(); // normalize normal direction
-	// If the normal and the view direction are not opposite to each other 
-	// reverse the normal direction. That also means we are inside the sphere so set
-	// the inside bool to true. Finally reverse the sign of IdotN which we want
-	// positive.
-	T bias = 1e-4; // add some bias to the point from which we will be tracing
-	bool inside = false;
-	if (raydir.dot(nhit) > 0) nhit = -nhit, inside = true;
-	if ((sphere->transparency > 0 || sphere->reflection > 0) && depth < MAX_RAY_DEPTH) {
-		T facingratio = -raydir.dot(nhit);
-		// change the mix value to tweak the effect
-		T fresneleffect = mix<T>(pow(1 - facingratio, 3), 1, 0.1); 
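-		// i.e. fresneleffect = 0.1 + 0.9 * (1 - facingratio)^3, a cheap
-		// Schlick-style approximation: grazing rays reflect more, head-on
-		// rays refract more (facingratio is the cosine of the view/normal angle)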
-		// compute reflection direction (no need to normalize because all vectors
-		// are already normalized)
-		Vec3<T> refldir = raydir - nhit * 2 * raydir.dot(nhit);
-		refldir.normalize();
-		Vec3<T> reflection = trace(phit + nhit * bias, refldir, spheres, depth + 1);
-		Vec3<T> refraction = 0;
-		// if the sphere is also transparent compute refraction ray (transmission)
-		if (sphere->transparency) {
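-			// Snell's law with eta = n1/n2; if k < 0 the ray undergoes total
-			// internal reflection, which this code does not handle (sqrt(k)
-			// would yield NaN)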
-			T ior = 1.1, eta = (inside) ? ior : 1 / ior; // are we inside or outside the surface?
-			T cosi = -nhit.dot(raydir);
-			T k = 1 - eta * eta * (1 - cosi * cosi);
-			Vec3<T> refrdir = raydir * eta + nhit * (eta *  cosi - sqrt(k));
-			refrdir.normalize();
-			refraction = trace(phit - nhit * bias, refrdir, spheres, depth + 1);
-		}
-		// the result is a mix of reflection and refraction (if the sphere is transparent)
-		surfaceColor = (reflection * fresneleffect + 
-			refraction * (1 - fresneleffect) * sphere->transparency) * sphere->surfaceColor;
-	}
-	else {
-		// it's a diffuse object, no need to raytrace any further
-		for (unsigned i = 0; i < spheres.size(); ++i) {
-			if (spheres[i]->emissionColor.x > 0) {
-				// this is a light
-				Vec3<T> transmission = 1;
-				Vec3<T> lightDirection = spheres[i]->center - phit;
-				lightDirection.normalize();
-				for (unsigned j = 0; j < spheres.size(); ++j) {
-					if (i != j) {
-						T t0, t1;
-						if (spheres[j]->intersect(phit + nhit * bias, lightDirection, &t0, &t1)) {
-							transmission = 0;
-							break;
-						}
-					}
-				}
-				surfaceColor += sphere->surfaceColor * transmission * 
-					std::max(T(0), nhit.dot(lightDirection)) * spheres[i]->emissionColor;
-			}
-		}
-	}
-
-	return surfaceColor + sphere->emissionColor;
-}
-
-// Main rendering function. We compute a camera ray for each pixel of the image
-// trace it and return a color. If the ray hits a sphere, we return the color of the
-// sphere at the intersection point, else we return the background color.
-template<typename T>
-unsigned int render(const std::vector<Sphere<T> *> &spheres)
-{
-	unsigned width = 640, height = 480;
-	Vec3<T> *image = new Vec3<T>[width * height], *pixel = image;
-	T invWidth = 1 / T(width), invHeight = 1 / T(height);
-	T fov = 30, aspectratio = width / T(height);
-	T angle = tan(M_PI * 0.5 * fov / T(180));
-	// Trace rays
-	for (unsigned y = 0; y < height; ++y) {
-		for (unsigned x = 0; x < width; ++x, ++pixel) {
-			T xx = (2 * ((x + 0.5) * invWidth) - 1) * angle * aspectratio;
-			T yy = (1 - 2 * ((y + 0.5) * invHeight)) * angle;
-			Vec3<T> raydir(xx, yy, -1);
-			raydir.normalize();
-			*pixel = trace(Vec3<T>(0), raydir, spheres, 0);
-		}
-	}
-#if 0
-	// Save result to a PPM image (keep these flags if you compile under Windows)
-	std::ofstream ofs("./untitled.ppm", std::ios::out | std::ios::binary);
-	ofs << "P6\n" << width << " " << height << "\n255\n";
-	for (unsigned i = 0; i < width * height; ++i) {
-		ofs << (unsigned char)(std::min(T(1), image[i].x) * 255) << 
-		(unsigned char)(std::min(T(1), image[i].y) * 255) <<
-		(unsigned char)(std::min(T(1), image[i].z) * 255); 
-	}
-	ofs.close();
-#endif
-
-  unsigned int bad_hash = 0;
-	for (unsigned i = 0; i < width * height; ++i) {
-    bad_hash = bad_hash*31 + (unsigned int)(std::min(T(1), image[i].x) * 255);
-    bad_hash = bad_hash*31 + (unsigned int)(std::min(T(1), image[i].y) * 255);
-    bad_hash = bad_hash*31 + (unsigned int)(std::min(T(1), image[i].z) * 255);
-	}
-	delete [] image;
-
-  return bad_hash;
-}
-
-volatile unsigned int dont_optimize_me;
-
-int main(int argc, char **argv) {
-	srand48(13);
-	std::vector<Sphere<float> *> spheres;
-	// position, radius, surface color, reflectivity, transparency, emission color
-	spheres.push_back(new Sphere<float>(Vec3<float>(0, -10004, -20), 10000, Vec3<float>(0.2), 0, 0.0));
-	spheres.push_back(new Sphere<float>(Vec3<float>(0, 0, -20), 4, Vec3<float>(1.00, 0.32, 0.36), 1, 0.5));
-	spheres.push_back(new Sphere<float>(Vec3<float>(5, -1, -15), 2, Vec3<float>(0.90, 0.76, 0.46), 1, 0.0));
-	spheres.push_back(new Sphere<float>(Vec3<float>(5, 0, -25), 3, Vec3<float>(0.65, 0.77, 0.97), 1, 0.0));
-	spheres.push_back(new Sphere<float>(Vec3<float>(-5.5, 0, -15), 3, Vec3<float>(0.90, 0.90, 0.90), 1, 0.0));
-	// light
-	spheres.push_back(new Sphere<float>(Vec3<float>(0, 20, -30), 3, Vec3<float>(0), 0, 0, Vec3<float>(3)));
-
-  dont_optimize_me = render<float>(spheres);
-  __asm__ __volatile__ ("" ::: "memory"); // memory barrier
-  if(dont_optimize_me == 0x4bd7c0e0) {
-    //printf("CORRECT\n");
-  } else {
-    printf("ERROR: WRONG ANSWER\n");
-  }
-
-	while (!spheres.empty()) {
-		Sphere<float> *sph = spheres.back();
-		spheres.pop_back();
-		delete sph;
-	}
-
-	return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/tsp_ga.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/tsp_ga.cpp
deleted file mode 100644
index 0e8f232cb099d37facffa0440b41dfd57efd4b2e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/apps/tsp_ga.cpp
+++ /dev/null
@@ -1,548 +0,0 @@
-//
-// based on: https://bitbucket.org/knordkvist/tsp-ga/overview
-// by Kristoffer Nordkvist 
-//
-#include <algorithm>
-#include <assert.h>
-#include <iostream>
-#include <limits>
-#include <math.h>
-#include <sstream>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <string>
-#include <time.h>
-
-class TSP
-{
-	public:
-		TSP(const double crossoverProbability, const double mutationProbability);
-
-		/* The constants used in this project */
-		static const unsigned int chromosones = 30, cities = 20, xMin = 0, xMax = 1000, yMin = 0, yMax = 500;
-
-		/* Generate a random population of chromosones */
-		void randomPopulation();
-
-		/* Create a new population using crossover and mutation */
-		void nextPopulation();
-
-		/* Returns the fitness of the best chromosone */
-		double getBestFitness() const;
-
-		/* Returns a string representation of the best path */
-		std::string getBestPathString() const;
-
-		/* Returns the total distance of the best chromosone path */
-		double getLowestTotalDistance() const;
-
-		/* Returns the populations average length */
-		double getAverageDistance() const;
-	private:
-		const double crossoverProbability, mutationProbability;
-
-		/* Gets the total distance of the supplied path */
-		double totalDistance(int const * const chromosone) const;
-
-		/* The coordinates for each city, (x,y) for the first city is found in (citiesX[0], citiesY[0]) */
-		double citiesX[cities], citiesY[cities];
-
-		/* The chromosone containing the shortest path */
-		int *bestChromosone;
-
-		/* Contains the current population of chromosones */
-		int (* solutions)[cities],
-			/* The two chromosones with the best fitness functions */
-			//bestChromosone1[cities], bestChromosone2[cities],
-			/* Used to store the new chromosones when creating a new population */
-			(* newPopulation)[cities];
-
-		/* Returns a random double r, 0 <= r <= max */
-		static double randomInclusive(const double max);
-
-		/* Returns a random double r, 0 <= r < max */
-		static double randomExclusive(const double max);
-
-		/* True if the two chromosones represent the same path */
-		static bool areChromosonesEqual(int const * const chromosoneA, int const * const chromosoneB);
-
-		/* Evaluate the fitness the supplied chromosone */
-		double evaluateFitness(const int * const chromosone) const;
-
-		/* Selects a chromosone from the current population using Roulette Wheel Selection.
-		 * Using the algorithm described in http://www.obitko.com/tutorials/genetic-algorithms/selection.php.
-		 */
-		int * rouletteSelection(double const * const fitness) const;
-
-		/* Replace the element at offspringIndex with the first element found in other that does not exist in offspringToRepair */
-		void repairOffspring(int * const offspringToRepair, int missingIndex, const int * const other);
-
-		/* Might swap one gene with another, depending on the mutation probability */
-		void mutate(int * const chromosone);
-
-		/* Cross over the parents to form new offspring using Multi-Point Crossover, collisions are handled as shown in lecture 5.
-		 * The chromosones might be a copy of their parents, depending on the crossover probability.
-		 */
-		void crossover(const int * const parentA, const int * const parentB, int * const offspringA, int * const offspringB);
-
-		/* Checks if the supplied chromosone is in newPopulation */
-		bool hasDuplicate(const int * const chromosone, size_t populationCount);
-
-		/* Copies the supplied chromosone to the new population */
-		void copyToNewPopulation(const int * const chromosone, size_t index);
-
-		/* Make the chromosone represent a path, which is chosen by random */
-		static void setRandomPath(int * const chromosone);
-};
-
-using namespace std;
-
-TSP::TSP(double crossoverProbability, double mutationProbability) : crossoverProbability(crossoverProbability),
-	mutationProbability(mutationProbability), solutions(new int[chromosones][cities]), newPopulation(new int[chromosones][cities])
-{
-	/* Seed the random number generator */
-  //srand((unsigned int)time(NULL));
-  srand(17);
-	/* Use the same number to generate a specific sequence */
-	//srand(0);
-	/* Set random coordinates */
-	for(size_t coordinateIndex = 0; coordinateIndex < cities; ++coordinateIndex)
-	{
-		/* 0 <= x <= xMax */
-		citiesX[coordinateIndex] = randomInclusive(xMax);
-		/* 0 <= y <= yMax */
-		citiesY[coordinateIndex] = randomInclusive(yMax);
-	}
-
-	/* Generate random population */
-	randomPopulation();
-}
-
-void TSP::randomPopulation()
-{
-	/* Iterate through each chromosone... */
-	for(size_t chromosoneIndex = 0; chromosoneIndex < chromosones; ++chromosoneIndex)
-	{
-		/* ... and give it a random path */
-		setRandomPath(solutions[chromosoneIndex]);
-	}
-}
-
-double TSP::getBestFitness() const
-{
-	return evaluateFitness(bestChromosone);
-}
-
-double TSP::getAverageDistance() const
-{
-	double distance = 0;
-	for(size_t chromosoneIndex = 0; chromosoneIndex < chromosones; ++chromosoneIndex)
-	{
-		distance += totalDistance(solutions[chromosoneIndex]);
-	}
-	return distance/chromosones;
-}
-
-string TSP::getBestPathString() const
-{
-	stringstream path;
-	for(size_t gene = 0; gene < cities; ++gene)
-	{
-		if(gene != 0)
-		{
-			path << ",";
-		}
-		path << bestChromosone[gene];
-	}
-	return path.str();
-}
-
-double TSP::getLowestTotalDistance() const
-{
-	return totalDistance(bestChromosone);
-}
-
-void TSP::nextPopulation()
-{
-	double fitness[chromosones];
-	/* Fill an array with a fitness score for each chromosone,
-	 * the index of a score corresponds with the chromosone's index in solutions[index]
-	 */
-	for(size_t chromosoneIndex = 0; chromosoneIndex < chromosones; ++chromosoneIndex)
-	{
-		fitness[chromosoneIndex] = evaluateFitness(solutions[chromosoneIndex]);
-	}
-	
-	/* Use elitism, find and copy over the two best chromosones to the new population */
-	int eliteIndex1 = 0, eliteIndex2 = 0;
-	/* find the best solution */
-	eliteIndex1 = max_element(fitness, fitness + chromosones) - fitness;
-	this->bestChromosone = solutions[eliteIndex1];
-
-	double highestFitness = 0;
-	/* Find the second best solution */
-	for(size_t chromosoneIndex = 0; chromosoneIndex < chromosones; ++chromosoneIndex)
-	{
-		if(chromosoneIndex != eliteIndex1 && fitness[chromosoneIndex] > highestFitness)
-		{
-			highestFitness = fitness[chromosoneIndex];
-			eliteIndex2 = chromosoneIndex;
-		}
-	}
-
-	/* Keep track of how many chromosones exists in the new population */
-	size_t offspringCount = 0;
-	/* Copy over the two best solutions to the new population */
-	copyToNewPopulation(solutions[eliteIndex1], offspringCount);
-	++offspringCount;
-	copyToNewPopulation(solutions[eliteIndex2], offspringCount);
-	++offspringCount;
-
-	/* Create the rest of the new population, break this loop when the new population is complete */
-	while(true)
-	{
-		int * parentA;
-		int * parentB;
-		parentA = rouletteSelection(fitness);
-		parentB = rouletteSelection(fitness);
-		while (parentB == parentA)
-		{
-			parentB = rouletteSelection(fitness);
-		}
-		int offspringA[cities];
-		int offspringB[cities];
-		crossover(parentA, parentB, offspringA, offspringB);
-		mutate(offspringA);
-		mutate(offspringB);
-		
-		/* Add to new population if an equal chromosone doesn't exist already */
-		if(!hasDuplicate(offspringA, offspringCount))
-		{
-			copyToNewPopulation(offspringA, offspringCount);
-			++offspringCount;
-		}
-		/* We need to check if the new population is filled */
-		if(offspringCount == chromosones)
-		{
-			break;
-		}
-		if(!hasDuplicate(offspringB, offspringCount))
-		{
-			copyToNewPopulation(offspringB, offspringCount);
-			++offspringCount;
-		}
-		/* Check again so that we don't accidentally write all over the heap and have to spend an evening wondering why the heap is corrupt... :) */
-		if(offspringCount == chromosones)
-		{
-			break;
-		}
-	}
-
-	/*
-	 * We now have a new population,
-	 * now it needs to replace the current population
-	 * so that we don't go through the same population every time we run this function
-	 */
-	for(size_t chromosoneIndex = 0; chromosoneIndex < chromosones; ++chromosoneIndex)
-	{
-		memcpy(solutions[chromosoneIndex], newPopulation[chromosoneIndex], sizeof(int) * cities);
-	}
-}
-
-bool TSP::hasDuplicate(const int * const chromosone, size_t populationCount)
-{
-	/* Iterate through each chromosone in newPopulation and compare them gene by gene */
-	for(size_t chromosoneIndex = 0; chromosoneIndex < populationCount; ++chromosoneIndex)
-	{
-		int genesCompared = 0;
-		for(size_t gene = 0; gene < cities; ++gene)
-		{
-			if(chromosone[gene] != newPopulation[chromosoneIndex][gene])
-			{
-				/* These chromosones are not equal! */
-				break;
-			}
-			++genesCompared;
-		}
-
-		if(genesCompared == cities)
-		{
-			return true;
-		}
-	}
-
-	return false;
-}
-
-void TSP::mutate(int * const chromosone)
-{
-	/* 0.0 <= random <= 1 */
-	{
-		double random = randomInclusive(1);
-		/* Nope, didn't happen */
-		if(random > mutationProbability)
-		{
-			return;
-		}
-	}
-
-	int tmp;
-	int random1 = (int)randomExclusive(cities);
-	int random2 = (int)randomExclusive(cities);
-	while(random1 == random2)
-	{
-		random2 = (int)randomExclusive(cities);
-	}
-
-	tmp = chromosone[random1];
-	chromosone[random1] = chromosone[random2];
-	chromosone[random2] = tmp;
-
-}
-
-void TSP::crossover(int const * const parentA, const int * const parentB, int * offspringA, int * offspringB)
-{
-	{
-		/* There is a chance we don't perform a crossover,
-		 * in that case the offspring is a copy of the parents
-		 */
-		/* 0.0 <= random <= 1 */
-		double random = randomInclusive(1);
-		/* The offspring is a copy of their parents */
-		if(random > crossoverProbability)
-		{
-			memcpy(offspringA, parentA, sizeof(int) * cities);
-			memcpy(offspringB, parentB, sizeof(int) * cities);
-			return;
-		}
-	}
-	/* Perform multi-point crossover to generate offspring */
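-	/* Worked example with 5 cities and cut points 1 and 3:
-	 *   parentA = [0 1 2 3 4], parentB = [3 1 4 2 0]
-	 * offspringA starts as a copy of parentA and receives B's slice [1,3):
-	 *   [0 1 4 3 4] -- the trailing 4 now duplicates the copied slice, so it
-	 * is marked -1 below and then repaired with a city from offspringB. */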
-
-	/* 0 <= cuttOffIndex <= cities */
-	int cuttOffIndex1 = (int)randomInclusive(cities);
-	int cuttOffIndex2 = (int)randomInclusive(cities);
-	while(cuttOffIndex2 == cuttOffIndex1)
-	{
-		cuttOffIndex2 = (int)randomExclusive(cities);
-	}
-
-	unsigned int start;
-	unsigned int end;
-	if(cuttOffIndex1 < cuttOffIndex2)
-	{
-		start = cuttOffIndex1;
-		end = cuttOffIndex2;
-	}
-	else
-	{
-		start = cuttOffIndex2;
-		end = cuttOffIndex1;
-	}
-	/* Offspring A is initially copy of parent A */
-	memcpy(offspringA, parentA, sizeof(int) * cities);
-	/* Offspring B is initially copy of parent B */
-	memcpy(offspringB, parentB, sizeof(int) * cities);
-
-	/* Put a sequence of parent B in offspring A */
-	memcpy(offspringA + start, parentB + start, sizeof(int) * (end - start));
-	/* Put a sequence of parent A in offspring B */
-	memcpy(offspringB + start, parentA + start, sizeof(int) * (end - start));
-
-	/* Mark collisions in offspring with -1*/
-	for(size_t cityIndex = 0; cityIndex  < cities; ++cityIndex)
-	{
-		/* Index is part of the parent sequence */
-		if((cityIndex  >= start && cityIndex  < end)) {
-			/* Do nothing, we want to keep this sequence intact */
-		}
-		else
-		{
-			/* Check if the item at cityIndex also occurs somewhere in the copied substring */
-			for(size_t substringIndex = start; substringIndex < end; ++substringIndex)
-			{
-				/* A duplicate, mark it */
-				if(offspringA[cityIndex] == offspringA[substringIndex])
-				{
-					offspringA[cityIndex] = -1;
-				}
-				if(offspringB[cityIndex] == offspringB[substringIndex])
-				{
-					offspringB[cityIndex] = -1;
-				}
-			}
-		}
-
-	}
-
-	/*
-	* Go through the offspring,
-	* if an element is marked we fill the hole with an element from the other offspring
-	*/
-	for(size_t offspringIndex = 0; offspringIndex < cities; ++offspringIndex)
-	{
-		/* There is a hole here */
-		if(offspringA[offspringIndex] == -1)
-		{
-			repairOffspring(offspringA, offspringIndex, offspringB);
-		}
-		if(offspringB[offspringIndex] == -1)
-		{
-			repairOffspring(offspringB, offspringIndex, offspringA);
-		}
-	}
-}
-
-void TSP::repairOffspring(int * const offspringToRepair, int missingIndex, const int * const other)
-{
-	/* Iterate through the other offspring until we find an element which doesn't exist in the offspring we are repairing */
-	for(size_t patchIndex = 0; patchIndex < cities; ++patchIndex)
-	{
-		/* Look for other[patchIndex] in offspringToRepair */
-		int *missing = find(offspringToRepair, offspringToRepair + cities, other[patchIndex]);
-
-		/* The element at other[patchIndex] is missing from offspringToRepair */
-		if(missing == (offspringToRepair + cities))
-		{
-			//cout << "1:" << offspringToRepair[missingIndex] << endl;
-			offspringToRepair[missingIndex] = other[patchIndex];
-			//cout << "2:" << offspringToRepair[missingIndex] << endl;
-			break;
-		}
-	}
-}
-
-void TSP::copyToNewPopulation(int const * const chromosone, size_t index)
-{
-	assert(index < chromosones && "Index out of bounds");
-	for(size_t i = 0; i < cities; ++i)
-	{
-		newPopulation[index][i] = chromosone[i];
-	}
-
-}
-
-int * TSP::rouletteSelection(double const * const fitness) const
-{
-	double sum = 0;
-	/* Calculate sum of all chromosome fitnesses in population */
-	for(size_t i = 0; i < chromosones; ++i)
-	{
-		sum += fitness[i];
-	}
-
-	/* 0.0 <= random <= sum */
-	double random = randomInclusive(sum);
-
-	sum = 0;
-	/* Walk the population accumulating fitnesses into a running sum s; when s is greater than or equal to the random value r, stop and return the chromosome at that position */
-	for(size_t i = 0; i < chromosones; ++i)
-	{
-		sum += fitness[i];
-		if(sum >= random)
-		{
-			return solutions[i];
-		}
-	}
-	assert(false && "A chromosone should have been picked by now");
-	return(NULL);
-}
-
-void TSP::setRandomPath(int * chromosone)
-{
-	for(size_t i = 0; i < cities; ++i)
-	{
-		chromosone[i] = i;
-	}
-
-	/*
-	 * Shuffle the chromosone using the Fisher–Yates shuffle.
-	 */
-	for(size_t i = cities-1; i > 0; --i)
-	{
-		/* 0 <= random <= i */
-		int random = (int)randomInclusive(i);
-		int temp = chromosone[i];
-		chromosone[i] = chromosone[random];
-		chromosone[random] = temp;
-	}
-}
-
-double TSP::evaluateFitness(int const * const chromosone) const
-{
-	return 1/totalDistance(chromosone);
-}
-
-double TSP::totalDistance(int const * const chromosone) const
-{
-	double distance = 0;
-	/* Calculate the total distance between all cities */
-	for(size_t i = 0; i < cities-1; ++i)
-	{
-		double dx = citiesX[chromosone[i]] - citiesX[chromosone[i+1]];
-		double dy = citiesY[chromosone[i]] - citiesY[chromosone[i+1]];
-
-		/* The distance between two points is the square root of (dx^2+dy^2) */
-		distance += sqrt((pow(dx, 2.0) + pow(dy, 2.0)));
-	}
-	/* We complete the tour by adding the distance between the last and the first city */
-	double dx = citiesX[chromosone[cities-1]] - citiesX[chromosone[0]];
-	double dy = citiesY[chromosone[cities-1]] - citiesY[chromosone[0]];
-	distance += sqrt((pow(dx, 2.0) + pow(dy, 2.0)));
-
-	return distance;
-}
-
-double TSP::randomInclusive(double max)
-{
-	/* Generate random number r, 0.0 <= r <= max */
-	//return ((double)rand() / (double)RAND_MAX * max);
-	return ((double)rand() * max) / (double)RAND_MAX;
-}
-
-double TSP::randomExclusive(double max)
-{
-	/* Generate random number r, 0.0 <= r < max */
-	//return ((double)rand() / ((double)RAND_MAX + 1) * max);
-	return ((double)rand() * max) / ((double)RAND_MAX + 1);
-}
-
-int main(int argc, const char *argv[])
-{
-	/* 90% crossover probability, 2% mutation probability */
-	TSP *tsp = new TSP(0.9, 0.02);
-	size_t generations = 0, generationsWithoutImprovement = 0;
-	double bestFitness = -1;
-	double initialAverage = tsp->getAverageDistance();
-	/* Run for 10k generations (generationsWithoutImprovement is tracked below but never used as a stopping criterion) */
-	while(generations < 10000)
-	{
-		tsp->nextPopulation();
-		++generations;
-		double newFitness = tsp->getBestFitness();
-		/* The new fitness is higher, the chromosone is better */
-		if(newFitness > bestFitness)
-		{
-			bestFitness = newFitness;
-			generationsWithoutImprovement = 0;
-		 //cout << "Best goal function: " << tsp->getBestFitness() << endl;
-		}
-		else
-		{
-			++generationsWithoutImprovement;
-		}
-	}
- //cout << "DONE!" << endl;
-	cout << "Number of generations: " << generations << endl;
-	cout << "Best chromosone info: " << endl;
-	cout << "\t-Path: " << tsp->getBestPathString() << endl;
-	cout << "\t-Goal function: " << tsp->getBestFitness() << endl;
-	cout << "\t-Distance: " << tsp->getLowestTotalDistance() << endl;
-	cout << "Average distance: " << tsp->getAverageDistance() << endl;
-	cout << "Initial average: " << initialAverage << endl;
-	delete tsp;
-	return 0;
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/cc_param_defaults.json b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/cc_param_defaults.json
deleted file mode 100644
index 067a26573a08ea6956e407833fa9ca17faafa4bf..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/cc_param_defaults.json
+++ /dev/null
@@ -1 +0,0 @@
-{"max-pipeline-region-insns": {"default": 200, "max": 0, "min": 0}, "ipa-cp-loop-hint-bonus": {"default": 64, "max": 0, "min": 0}, "lim-expensive": {"default": 20, "max": 0, "min": 0}, "uninit-control-dep-attempts": {"default": 1000, "max": 0, "min": 1}, "lto-partitions": {"default": 32, "max": 0, "min": 1}, "max-inline-recursive-depth-auto": {"default": 8, "max": 0, "min": 0}, "max-unroll-times": {"default": 8, "max": 0, "min": 0}, "max-tail-merge-comparisons": {"default": 10, "max": 0, "min": 0}, "early-inlining-insns": {"default": 11, "max": 0, "min": 0}, "prefetch-latency": {"default": 200, "max": 0, "min": 0}, "partial-inlining-entry-probability": {"default": 70, "max": 0, "min": 0}, "integer-share-limit": {"default": 251, "max": 2, "min": 2}, "tm-max-aggregate-size": {"default": 9, "max": 0, "min": 0}, "ira-max-conflict-table-size": {"default": 1000, "max": 0, "min": 0}, "asan-instrument-reads": {"default": 1, "max": 1, "min": 0}, "lto-min-partition": {"default": 1000, "max": 0, "min": 0}, "hot-bb-frequency-fraction": {"default": 1000, "max": 0, "min": 0}, "min-vect-loop-bound": {"default": 1, "max": 0, "min": 1}, "max-crossjump-edges": {"default": 100, "max": 0, "min": 0}, "sms-dfa-history": {"default": 0, "max": 0, "min": 0}, "tracer-max-code-growth": {"default": 100, "max": 0, "min": 0}, "max-pipeline-region-blocks": {"default": 15, "max": 0, "min": 0}, "gcse-after-reload-partial-fraction": {"default": 3, "max": 0, "min": 0}, "asan-stack": {"default": 1, "max": 1, "min": 0}, "asan-memintrin": {"default": 1, "max": 1, "min": 0}, "large-function-insns": {"default": 2700, "max": 0, "min": 0}, "scev-max-expr-size": {"default": 100, "max": 0, "min": 0}, "iv-consider-all-candidates-bound": {"default": 30, "max": 0, "min": 0}, "max-partial-antic-length": {"default": 100, "max": 0, "min": 0}, "prefetch-min-insn-to-mem-ratio": {"default": 3, "max": 0, "min": 0}, "min-crossjump-insns": {"default": 5, "max": 0, "min": 1}, "asan-use-after-return": {"default": 1, "max": 1, "min": 0}, "allow-load-data-races": {"default": 1, "max": 1, "min": 0}, "max-jump-thread-duplication-stmts": {"default": 15, "max": 0, "min": 0}, "tracer-min-branch-probability": {"default": 50, "max": 100, "min": 0}, "l2-cache-size": {"default": 512, "max": 0, "min": 0}, "max-cse-insns": {"default": 1000, "max": 0, "min": 0}, "sched-pressure-algorithm": {"default": 1, "max": 2, "min": 1}, "max-unrolled-insns": {"default": 200, "max": 0, "min": 0}, "ipa-cp-value-list-size": {"default": 8, "max": 0, "min": 0}, "graphite-max-nb-scop-params": {"default": 10, "max": 0, "min": 0}, "max-completely-peel-times": {"default": 16, "max": 0, "min": 0}, "min-inline-recursive-probability": {"default": 10, "max": 0, "min": 0}, "max-stores-to-sink": {"default": 2, "max": 0, "min": 0}, "sink-frequency-threshold": {"default": 75, "max": 100, "min": 0}, "builtin-expect-probability": {"default": 90, "max": 100, "min": 0}, "max-average-unrolled-insns": {"default": 80, "max": 0, "min": 0}, "tracer-min-branch-ratio": {"default": 10, "max": 100, "min": 0}, "inline-unit-growth": {"default": 30, "max": 0, "min": 0}, "max-early-inliner-iterations": {"default": 1, "max": 0, "min": 0}, "hot-bb-count-ws-permille": {"default": 999, "max": 1000, "min": 0}, "max-gcse-memory": {"default": 52428800, "max": 0, "min": 0}, "ggc-min-expand": {"default": 30, "max": 0, "min": 0}, "tree-reassoc-width": {"default": 0, "max": 0, "min": 0}, "max-once-peeled-insns": {"default": 400, "max": 0, "min": 0}, "max-inline-recursive-depth": {"default": 8, "max": 0, "min": 0}, 
"max-inline-insns-recursive": {"default": 450, "max": 0, "min": 0}, "ira-loop-reserved-regs": {"default": 2, "max": 0, "min": 0}, "align-loop-iterations": {"default": 4, "max": 0, "min": 0}, "gcse-cost-distance-ratio": {"default": 10, "max": 0, "min": 0}, "sched-mem-true-dep-cost": {"default": 1, "max": 0, "min": 0}, "gcse-unrestricted-cost": {"default": 3, "max": 0, "min": 0}, "max-inline-insns-recursive-auto": {"default": 450, "max": 0, "min": 0}, "max-cse-path-length": {"default": 10, "max": 0, "min": 1}, "switch-conversion-max-branch-ratio": {"default": 8, "max": 0, "min": 1}, "max-tracked-strlens": {"default": 1000, "max": 0, "min": 0}, "inline-min-speedup": {"default": 10, "max": 0, "min": 0}, "max-cselib-memory-locations": {"default": 500, "max": 0, "min": 0}, "max-tail-merge-iterations": {"default": 2, "max": 0, "min": 0}, "max-inline-insns-auto": {"default": 40, "max": 0, "min": 0}, "min-insn-to-prefetch-ratio": {"default": 9, "max": 0, "min": 0}, "max-slsr-cand-scan": {"default": 50, "max": 999999, "min": 1}, "min-nondebug-insn-uid": {"default": 0, "max": 0, "min": 1}, "max-sched-region-blocks": {"default": 10, "max": 0, "min": 0}, "vect-max-version-for-alignment-checks": {"default": 6, "max": 0, "min": 0}, "max-vartrack-size": {"default": 50000000, "max": 0, "min": 0}, "loop-max-datarefs-for-datadeps": {"default": 1000, "max": 0, "min": 0}, "asan-instrument-writes": {"default": 1, "max": 1, "min": 0}, "asan-globals": {"default": 1, "max": 1, "min": 0}, "large-function-growth": {"default": 100, "max": 0, "min": 0}, "max-last-value-rtl": {"default": 10000, "max": 0, "min": 0}, "selsched-max-sched-times": {"default": 2, "max": 0, "min": 0}, "sms-max-ii-factor": {"default": 100, "max": 0, "min": 0}, "max-hoist-depth": {"default": 30, "max": 0, "min": 0}, "comdat-sharing-probability": {"default": 20, "max": 0, "min": 0}, "allow-store-data-races": {"default": 1, "max": 1, "min": 0}, "omega-max-vars": {"default": 128, "max": 0, "min": 0}, "iv-max-considered-uses": {"default": 250, "max": 0, "min": 0}, "max-inline-insns-single": {"default": 400, "max": 0, "min": 0}, "simultaneous-prefetches": {"default": 3, "max": 0, "min": 0}, "ipa-max-agg-items": {"default": 16, "max": 0, "min": 0}, "max-peel-times": {"default": 16, "max": 0, "min": 0}, "min-size-for-stack-sharing": {"default": 32, "max": 0, "min": 0}, "ira-max-loops-num": {"default": 100, "max": 0, "min": 0}, "tracer-dynamic-coverage": {"default": 75, "max": 100, "min": 0}, "max-gcse-insertion-ratio": {"default": 20, "max": 0, "min": 0}, "tracer-min-branch-probability-feedback": {"default": 80, "max": 100, "min": 0}, "max-sched-insn-conflict-delay": {"default": 3, "max": 10, "min": 1}, "max-peeled-insns": {"default": 100, "max": 0, "min": 0}, "max-dse-active-local-stores": {"default": 5000, "max": 0, "min": 0}, "max-variable-expansions-in-unroller": {"default": 1, "max": 0, "min": 0}, "max-delay-slot-live-search": {"default": 333, "max": 0, "min": 0}, "min-spec-prob": {"default": 40, "max": 0, "min": 0}, "loop-invariant-max-bbs-in-loop": {"default": 10000, "max": 0, "min": 0}, "selsched-insns-to-rename": {"default": 2, "max": 0, "min": 0}, "max-completely-peel-loop-nest-depth": {"default": 8, "max": 0, "min": 0}, "allow-packed-store-data-races": {"default": 1, "max": 1, "min": 0}, "omega-eliminate-redundant-constraints": {"default": 0, "max": 1, "min": 0}, "omega-max-geqs": {"default": 256, "max": 0, "min": 0}, "l1-cache-line-size": {"default": 32, "max": 0, "min": 0}, "case-values-threshold": {"default": 0, "max": 0, "min": 0}, 
"max-pending-list-length": {"default": 32, "max": 0, "min": 0}, "sccvn-max-alias-queries-per-access": {"default": 1000, "max": 0, "min": 0}, "max-vartrack-expr-depth": {"default": 12, "max": 0, "min": 0}, "loop-block-tile-size": {"default": 51, "max": 0, "min": 0}, "sms-loop-average-count-threshold": {"default": 0, "max": 0, "min": 0}, "vect-max-peeling-for-alignment": {"default": -1, "max": 64, "min": -1}, "selsched-max-lookahead": {"default": 50, "max": 0, "min": 0}, "omega-max-keys": {"default": 500, "max": 0, "min": 0}, "sccvn-max-scc-size": {"default": 10000, "max": 0, "min": 10}, "predictable-branch-outcome": {"default": 2, "max": 50, "min": 0}, "ssp-buffer-size": {"default": 8, "max": 0, "min": 1}, "max-delay-slot-insn-search": {"default": 100, "max": 0, "min": 0}, "sms-min-sc": {"default": 2, "max": 1, "min": 1}, "lra-max-considered-reload-pseudos": {"default": 500, "max": 0, "min": 0}, "tracer-dynamic-coverage-feedback": {"default": 95, "max": 100, "min": 0}, "omega-max-eqs": {"default": 128, "max": 0, "min": 0}, "max-fields-for-field-sensitive": {"default": 0, "max": 0, "min": 0}, "max-sched-region-insns": {"default": 100, "max": 0, "min": 0}, "large-stack-frame-growth": {"default": 1000, "max": 0, "min": 0}, "omega-max-wild-cards": {"default": 18, "max": 0, "min": 0}, "max-sched-extend-regions-iters": {"default": 0, "max": 0, "min": 0}, "max-unswitch-insns": {"default": 50, "max": 0, "min": 0}, "ipcp-unit-growth": {"default": 10, "max": 0, "min": 0}, "max-unswitch-level": {"default": 3, "max": 0, "min": 0}, "l1-cache-size": {"default": 64, "max": 0, "min": 0}, "max-grow-copy-bb-insns": {"default": 8, "max": 0, "min": 0}, "max-iterations-computation-cost": {"default": 10, "max": 0, "min": 0}, "ipa-cp-array-index-hint-bonus": {"default": 48, "max": 0, "min": 0}, "ggc-min-heapsize": {"default": 4096, "max": 0, "min": 0}, "align-threshold": {"default": 100, "max": 0, "min": 1}, "graphite-max-bbs-per-function": {"default": 100, "max": 0, "min": 0}, "max-vartrack-reverse-op-size": {"default": 50, "max": 0, "min": 0}, "ipa-sra-ptr-growth-factor": {"default": 2, "max": 0, "min": 0}, "max-completely-peeled-insns": {"default": 100, "max": 0, "min": 0}, "ipa-cp-eval-threshold": {"default": 500, "max": 0, "min": 0}, "large-stack-frame": {"default": 256, "max": 0, "min": 0}, "max-modulo-backtrack-attempts": {"default": 40, "max": 0, "min": 0}, "omega-hash-table-size": {"default": 550, "max": 0, "min": 0}, "max-goto-duplication-insns": {"default": 8, "max": 0, "min": 0}, "max-sched-ready-insns": {"default": 100, "max": 0, "min": 0}, "max-iterations-to-track": {"default": 1000, "max": 0, "min": 0}, "scev-max-expr-complexity": {"default": 10, "max": 0, "min": 0}, "cxx-max-namespaces-for-diagnostic-help": {"default": 1000, "max": 0, "min": 0}, "max-reload-search-insns": {"default": 100, "max": 0, "min": 0}, "use-canonical-types": {"default": 1, "max": 1, "min": 0}, "gcse-after-reload-critical-fraction": {"default": 10, "max": 0, "min": 0}, "sched-state-edge-prob-cutoff": {"default": 10, "max": 100, "min": 0}, "sched-spec-prob-cutoff": {"default": 40, "max": 100, "min": 0}, "unlikely-bb-count-fraction": {"default": 20, "max": 10000, "min": 1}, "slp-max-insns-in-bb": {"default": 1000, "max": 0, "min": 0}, "max-peel-branches": {"default": 32, "max": 0, "min": 0}, "large-unit-insns": {"default": 10000, "max": 0, "min": 0}, "iv-always-prune-cand-set-bound": {"default": 10, "max": 0, "min": 0}, "vect-max-version-for-alias-checks": {"default": 10, "max": 0, "min": 0}, "max-predicted-iterations": 
{"default": 100, "max": 0, "min": 0}, "allow-packed-load-data-races": {"default": 1, "max": 1, "min": 0}}
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags.py
deleted file mode 100755
index 1edc5bb8a3ce886e968a5f7d7d2e4f362ff8940b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags.py
+++ /dev/null
@@ -1,412 +0,0 @@
-#!/usr/bin/env python
-import adddeps  # fix sys.path
-
-import math
-import argparse
-import ast
-import collections
-import json
-import logging
-import opentuner
-import os
-import random
-import re
-import shutil
-import subprocess
-import sys
-
-from opentuner.resultsdb.models import Result, TuningRun
-from opentuner.search import manipulator
-
-FLAGS_WORKING_CACHE_FILE = 'cc_flags.json'
-PARAMS_DEFAULTS_CACHE_FILE = 'cc_param_defaults.json'
-PARAMS_DEF_PATH = '~/gcc-4.9.0/gcc/params.def'
-PARAMS_WORKING_CACHE_FILE = 'cc_params.json'
-
-log = logging.getLogger('gccflags')
-
-argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-argparser.add_argument('source', help='source file to compile')
-argparser.add_argument('--compile-template',
-                       default='{cc} {source} -o {output} -lpthread {flags}',
-                       help='command to compile {source} into {output} with'
-                            ' {flags}')
-argparser.add_argument('--compile-limit', type=float, default=30,
-                       help='kill gcc if it runs more than {default} sec')
-argparser.add_argument('--scaler', type=int, default=4,
-                       help='by what factor to try increasing parameters')
-argparser.add_argument('--cc', default='g++', help='g++ or gcc')
-argparser.add_argument('--output', default='./tmp.bin',
-                       help='temporary file for compiler to write to')
-argparser.add_argument('--debug', action='store_true',
-                       help='on gcc errors try to find minimal set '
-                            'of args to reproduce error')
-argparser.add_argument('--force-killall', action='store_true',
-                       help='killall cc1plus before each collection')
-argparser.add_argument('--memory-limit', default=1024 ** 3, type=int,
-                       help='memory limit for child process')
-argparser.add_argument('--no-cached-flags', action='store_true',
-                       help='regenerate the lists of legal flags each time')
-argparser.add_argument('--flags-histogram', action='store_true',
-                       help='print out a histogram of flags')
-argparser.add_argument('--flag-importance',
-                       help='Test the importance of different flags from a '
-                            'given json file.')
-
-
-class GccFlagsTuner(opentuner.measurement.MeasurementInterface):
-  def __init__(self, *pargs, **kwargs):
-    super(GccFlagsTuner, self).__init__(program_name=args.source, *pargs,
-                                        **kwargs)
-    self.gcc_version = self.extract_gcc_version()
-    self.cc_flags = self.extract_working_flags()
-    self.cc_param_defaults = self.extract_param_defaults()
-    self.cc_params = self.extract_working_params()
-
-    # these bugs are hardcoded for now
-    # sets of options which cause gcc to crash or misbehave
-    if True:
-      # These bugs were for gcc 4.7 on ubuntu
-      self.cc_bugs = (['-fipa-matrix-reorg', '-fwhole-program'],
-                      ['-fno-tree-coalesce-inlined-vars'],
-                      ['-fno-inline-atomics'],
-                      ['-ftoplevel-reorder', '-fno-unit-at-a-time'])
-    else:
-      # Bugs for gcc 4.9 (work in progress, incomplete list)
-      self.cc_bugs = (['-ftoplevel-reorder', '-fno-unit-at-a-time'], )
-
-    self.result_list = {}
-    self.parallel_compile = True
-    try:
-      os.stat('./tmp')
-    except OSError:
-      os.mkdir('./tmp')
-    self.run_baselines()
-
-  def run_baselines(self):
-    log.info("baseline perfs -O0=%.4f -O1=%.4f -O2=%.4f -O3=%.4f",
-             *[self.run_with_flags(['-O%d' % i], None).time
-               for i in range(4)])
-
-  def extract_gcc_version(self):
-    m = re.search(r'([0-9]+)[.]([0-9]+)[.]([0-9]+)', subprocess.check_output([
-        self.args.cc, '--version']))
-    if m:
-      gcc_version = tuple(map(int, m.group(1, 2, 3)))
-    else:
-      gcc_version = None
-    log.debug('gcc version %s', gcc_version)
-    return gcc_version
-
-  def extract_working_flags(self):
-    """
-    Figure out which gcc flags work (don't cause gcc to barf) by running
-    each one.
-    """
-    if os.path.isfile(FLAGS_WORKING_CACHE_FILE) and not args.no_cached_flags:
-      # use cached version
-      found_cc_flags = json.load(open(FLAGS_WORKING_CACHE_FILE))
-    else:
-      # extract flags from --help=optimizers
-      optimizers, err = subprocess.Popen([self.args.cc, '--help=optimizers'],
-                                         stdout=subprocess.PIPE).communicate()
-      found_cc_flags = re.findall(r'^  (-f[a-z0-9-]+) ', optimizers,
-                                  re.MULTILINE)
-      log.info('Determining which of %s possible gcc flags work',
-               len(found_cc_flags))
-      found_cc_flags = filter(self.check_if_flag_works, found_cc_flags)
-      json.dump(found_cc_flags, open(FLAGS_WORKING_CACHE_FILE, 'w'))
-    return found_cc_flags
-
-  def extract_param_defaults(self):
-    """
-    Get the default, minimum, and maximum for each gcc parameter.
-    Requires source code for gcc to be in your home directory.
-    This example ships with a cached version so it does not require source.
-    """
-    if os.path.isfile(PARAMS_DEFAULTS_CACHE_FILE) and not args.no_cached_flags:
-      # use cached version
-      param_defaults = json.load(open(PARAMS_DEFAULTS_CACHE_FILE))
-    else:
-      # default values of params need to be extracted from source code,
-      # since they are not in --help
-      param_defaults = dict()
-      params_def = open(os.path.expanduser(PARAMS_DEF_PATH)).read()
-      for m in re.finditer(r'DEFPARAM *\((([^")]|"[^"]*")*)\)', params_def):
-        param_def_str = (m.group(1)
-                         #  Hacks!!!
-                         .replace('GGC_MIN_EXPAND_DEFAULT', '30')
-                         .replace('GGC_MIN_HEAPSIZE_DEFAULT', '4096')
-                         .replace('50 * 1024 * 1024', '52428800'))
-        try:
-          name, desc, default, param_min, param_max = ast.literal_eval(
-              '[' + param_def_str.split(',', 1)[1] + ']')
-          param_defaults[name] = {'default': default,
-                                  'min': param_min,
-                                  'max': param_max}
-        except:
-          log.exception("error with %s", param_def_str)
-      json.dump(param_defaults, open(PARAMS_DEFAULTS_CACHE_FILE, 'w'))
-    return param_defaults
-
-  def extract_working_params(self):
-    """
-    Figure out which gcc params work (don't cause gcc to barf) by running
-    each one to test.
-    """
-    params, err = subprocess.Popen(
-        [self.args.cc, '--help=params'], stdout=subprocess.PIPE).communicate()
-    all_params = re.findall(r'^  ([a-z0-9-]+) ', params, re.MULTILINE)
-    all_params = sorted(set(all_params) &
-                        set(self.cc_param_defaults.keys()))
-    if os.path.isfile(PARAMS_WORKING_CACHE_FILE) and not args.no_cached_flags:
-      # use cached version
-      return json.load(open(PARAMS_WORKING_CACHE_FILE))
-    else:
-      log.info('Determining which of %s possible gcc params work',
-               len(all_params))
-      working_params = []
-      for param in all_params:
-        if self.check_if_flag_works('--param={}={}'.format(
-                param, self.cc_param_defaults[param]['default'])):
-          working_params.append(param)
-      json.dump(working_params, open(PARAMS_WORKING_CACHE_FILE, 'w'))
-      return working_params
-
-  def check_if_flag_works(self, flag, try_inverted=True):
-    cmd = args.compile_template.format(source=args.source, output=args.output,
-                                       flags=flag, cc=args.cc)
-    compile_result = self.call_program(cmd, limit=args.compile_limit)
-    if compile_result['returncode'] != 0:
-      log.warning("removing flag %s because it results in compile error", flag)
-      return False
-    if 'warning: this target' in compile_result['stderr']:
-      log.warning("removing flag %s because not supported by target", flag)
-      return False
-    if 'has been renamed' in compile_result['stderr']:
-      log.warning("removing flag %s because renamed", flag)
-      return False
-    if try_inverted and flag[:2] == '-f':
-      if not self.check_if_flag_works(invert_gcc_flag(flag),
-                                      try_inverted=False):
-        log.warning("Odd... %s works but %s does not", flag,
-                    invert_gcc_flag(flag))
-        return False
-    return True
-
-  def manipulator(self):
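-    # Build the search space: an IntegerParameter for the -O level, an
-    # EnumParameter (on/off/default) for each boolean -f flag, and one
-    # numeric parameter per --param, bounded to within args.scaler times
-    # its documented default (many params report no usable max, i.e.
-    # max <= min, which is treated as unbounded below)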
-    m = manipulator.ConfigurationManipulator()
-    m.add_parameter(manipulator.IntegerParameter('-O', 0, 3))
-    for flag in self.cc_flags:
-      m.add_parameter(manipulator.EnumParameter(flag, ['on', 'off', 'default']))
-    for param in self.cc_params:
-      defaults = self.cc_param_defaults[param]
-      if defaults['max'] <= defaults['min']:
-        defaults['max'] = float('inf')
-      defaults['max'] = min(defaults['max'],
-                            max(1, defaults['default']) * args.scaler)
-      defaults['min'] = max(defaults['min'],
-                            max(1, defaults['default']) / args.scaler)
-
-      if param == 'l1-cache-line-size':
-        # gcc requires this to be a power of two or it hits an internal error
-        m.add_parameter(manipulator.PowerOfTwoParameter(param, 4, 256))
-      elif defaults['max'] > 128:
-        m.add_parameter(manipulator.LogIntegerParameter(
-            param, defaults['min'], defaults['max']))
-      else:
-        m.add_parameter(manipulator.IntegerParameter(
-            param, defaults['min'], defaults['max']))
-
-    return m
-
-  def cfg_to_flags(self, cfg):
-    flags = ['-O%d' % cfg['-O']]
-    for flag in self.cc_flags:
-      if cfg[flag] == 'on':
-        flags.append(flag)
-      elif cfg[flag] == 'off':
-        flags.append(invert_gcc_flag(flag))
-
-    for param in self.cc_params:
-      flags.append('--param=%s=%d' % (param, cfg[param]))
-
-    # workaround sets of flags that trigger compiler crashes/hangs
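-    # (if every flag of a known-bad set is present, drop the set's last flag)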
-    for bugset in self.cc_bugs:
-      if len(set(bugset) & set(flags)) == len(bugset):
-        flags.remove(bugset[-1])
-    return flags
-
-  def make_command(self, cfg):
-    return args.compile_template.format(source=args.source, output=args.output,
-                                        flags=' '.join(self.cfg_to_flags(cfg)),
-                                        cc=args.cc)
-
-  def get_tmpdir(self, result_id):
-    return './tmp/%d' % result_id
-
-  def cleanup(self, result_id):
-    tmp_dir = self.get_tmpdir(result_id)
-    shutil.rmtree(tmp_dir)
-
-  def compile_and_run(self, desired_result, input, limit):
-    cfg = desired_result.configuration.data
-    compile_result = self.compile(cfg, 0)
-    return self.run_precompiled(desired_result, input, limit, compile_result, 0)
-
-  compile_results = {'ok': 0, 'timeout': 1, 'error': 2}
-
-  def run_precompiled(self, desired_result, input, limit, compile_result,
-                      result_id):
-    if self.args.force_killall:
-      os.system('killall -9 cc1plus 2>/dev/null')
-    # Make sure compile was successful
-    if compile_result == self.compile_results['timeout']:
-      return Result(state='TIMEOUT', time=float('inf'))
-    elif compile_result == self.compile_results['error']:
-      return Result(state='ERROR', time=float('inf'))
-
-    tmp_dir = self.get_tmpdir(result_id)
-    output_dir = '%s/%s' % (tmp_dir, args.output)
-    try:
-      run_result = self.call_program([output_dir], limit=limit,
-                                     memory_limit=args.memory_limit)
-    except OSError:
-      return Result(state='ERROR', time=float('inf'))
-
-    if run_result['returncode'] != 0:
-      if run_result['timeout']:
-        return Result(state='TIMEOUT', time=float('inf'))
-      else:
-        log.error('program error')
-        return Result(state='ERROR', time=float('inf'))
-
-    return Result(time=run_result['time'])
-
-  def debug_gcc_error(self, flags):
-    def fails(subflags):
-      cmd = args.compile_template.format(source=args.source, output=args.output,
-                                         flags=' '.join(subflags),
-                                         cc=args.cc)
-      compile_result = self.call_program(cmd, limit=args.compile_limit)
-      return compile_result['returncode'] != 0
-
-    if self.args.debug:
-      while len(flags) > 8:
-        log.error("compile error with %d flags, diagnosing...", len(flags))
-        tmpflags = filter(lambda x: random.choice((True, False)), flags)
-        if fails(tmpflags):
-          flags = tmpflags
-
-      # linear scan: greedily keep only the flags needed to reproduce the failure
-      minimal_flags = []
-      for i in xrange(len(flags)):
-        tmpflags = minimal_flags + flags[i + 1:]
-        if not fails(tmpflags):
-          minimal_flags.append(flags[i])
-      log.error("compiler crashes/hangs with flags: %s", minimal_flags)
-
-  def compile(self, config_data, result_id):
-    flags = self.cfg_to_flags(config_data)
-    return self.compile_with_flags(flags, result_id)
-
-  def compile_with_flags(self, flags, result_id):
-    tmp_dir = self.get_tmpdir(result_id)
-    try:
-      os.stat(tmp_dir)
-    except OSError:
-      os.mkdir(tmp_dir)
-    output_dir = '%s/%s' % (tmp_dir, args.output)
-    cmd = args.compile_template.format(source=args.source, output=output_dir,
-                                       flags=' '.join(flags),
-                                       cc=args.cc)
-
-    compile_result = self.call_program(cmd, limit=args.compile_limit,
-                                       memory_limit=args.memory_limit)
-    if compile_result['returncode'] != 0:
-      if compile_result['timeout']:
-        log.warning("gcc timeout")
-        return self.compile_results['timeout']
-      else:
-        log.warning("gcc error %s", compile_result['stderr'])
-        self.debug_gcc_error(flags)
-        return self.compile_results['error']
-    return self.compile_results['ok']
-
-  def run_with_flags(self, flags, limit):
-    return self.run_precompiled(None, None, limit,
-                                self.compile_with_flags(flags, 0), 0)
-
-  def save_final_config(self, configuration):
-    """called at the end of tuning"""
-    print "Best flags written to gccflags_final_config.{json,cmd}"
-    self.manipulator().save_to_file(configuration.data,
-                                    'gccflags_final_config.json')
-    with open('gccflags_final_config.cmd', 'w') as fd:
-      fd.write(self.make_command(configuration.data))
-
-  def flags_histogram(self, session):
-    counter = collections.Counter()
-    q = session.query(TuningRun).filter_by(state='COMPLETE')
-    total = q.count()
-    for tr in q:
-      print tr.program.name
-      for flag in self.cfg_to_flags(tr.final_config.data):
-        counter[flag] += 1.0 / total
-    print counter.most_common(20)
-
-  def flag_importance(self):
-    """
-    Test the importance of each flag by measuring the performance with that
-    flag removed.  Print out a table for paper
-    """
-    with open(self.args.flag_importance) as fd:
-      best_cfg = json.load(fd)
-    flags = self.cfg_to_flags(best_cfg)
-    counter = collections.Counter()
-    baseline_time = self.flags_mean_time(flags)
-    for flag in flags[1:]:
-      delta_flags = [f for f in flags if f != flag]
-      flag_time = self.flags_mean_time(delta_flags)
-      impact = max(0.0, flag_time - baseline_time)
-      if math.isinf(impact):
-        impact = 0.0
-      counter[flag] = impact
-      print flag, '{:.4f}'.format(impact)
-    total_impact = sum(counter.values())
-    remaining_impact = total_impact
-    print r'\bf Flag & \bf Importance \\\hline'
-    for flag, impact in counter.most_common(20):
-      print r'{} & {:.1f}\% \\\hline'.format(flag, 100.0 * impact / total_impact)
-      remaining_impact -= impact
-    print r'{} other flags & {:.1f}\% \\\hline'.format(
-      len(flags) - 20, 100.0 * remaining_impact / total_impact)
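-    # Worked example (hypothetical numbers): with baseline_time = 1.00s, a
-    # flag whose removal raises the mean time to 1.25s has impact 0.25; if
-    # total_impact is 0.50, its table row reads 50.0%.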
-
-  def flags_mean_time(self, flags, trials=10):
-    precompiled = self.compile_with_flags(flags, 0)
-    total = 0.0
-    for _ in xrange(trials):
-      total += self.run_precompiled(None, None, None, precompiled, 0).time
-    return total / trials
-
-  def prefix_hook(self, session):
-    if self.args.flags_histogram:
-      self.flags_histogram(session)
-      sys.exit(0)
-    if self.args.flag_importance:
-      self.flag_importance()
-      sys.exit(0)
-
-
-
-def invert_gcc_flag(flag):
-  assert flag[:2] == '-f'
-  if flag[2:5] != 'no-':
-    return '-fno-' + flag[2:]
-  return '-f' + flag[5:]
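-# e.g. invert_gcc_flag('-funroll-loops')    == '-fno-unroll-loops'
-#      invert_gcc_flag('-fno-unroll-loops') == '-funroll-loops'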
-
-
-if __name__ == '__main__':
-  opentuner.init_logging()
-  args = argparser.parse_args()
-  GccFlagsTuner.main(args)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags_minimal.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags_minimal.py
deleted file mode 100755
index 0363b984a8c67064102e9025ce57d388c2585514..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/gccflags/gccflags_minimal.py
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env python
-#
-# Autotune flags to g++ to optimize the performance of apps/raytracer.cpp
-#
-# This is an extremely simplified version meant only for tutorials
-#
-import adddeps  # fix sys.path
-
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import EnumParameter
-from opentuner import IntegerParameter
-from opentuner import MeasurementInterface
-from opentuner import Result
-
-GCC_FLAGS = [
-  'align-functions', 'align-jumps', 'align-labels',
-  'align-loops', 'asynchronous-unwind-tables',
-  'branch-count-reg', 'branch-probabilities',
-  # ... (176 total)
-]
-
-# (name, min, max)
-GCC_PARAMS = [
-  ('early-inlining-insns', 0, 1000),
-  ('gcse-cost-distance-ratio', 0, 100),
-  ('iv-max-considered-uses', 0, 1000),
-  # ... (145 total)
-]
-
-
-class GccFlagsTuner(MeasurementInterface):
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    manipulator.add_parameter(
-      IntegerParameter('opt_level', 0, 3))
-    for flag in GCC_FLAGS:
-      manipulator.add_parameter(
-        EnumParameter(flag,
-                      ['on', 'off', 'default']))
-    for param, min, max in GCC_PARAMS:
-      manipulator.add_parameter(
-        IntegerParameter(param, min, max))
-    return manipulator
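-  # A configuration drawn from this manipulator is a plain dict mapping
-  # parameter names to values, e.g. (illustrative values only):
-  #   {'opt_level': 2, 'align-functions': 'on', 'early-inlining-insns': 421}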
-
-  def compile(self, cfg, id):
-    """
-    Compile a given configuration in parallel
-    """
-    gcc_cmd = 'g++ apps/raytracer.cpp -o ./tmp{0}.bin'.format(id)
-    gcc_cmd += ' -O{0}'.format(cfg['opt_level'])
-    for flag in GCC_FLAGS:
-      if cfg[flag] == 'on':
-        gcc_cmd += ' -f{0}'.format(flag)
-      elif cfg[flag] == 'off':
-        gcc_cmd += ' -fno-{0}'.format(flag)
-    for param, min, max in GCC_PARAMS:
-      gcc_cmd += ' --param {0}={1}'.format(
-        param, cfg[param])
-    return self.call_program(gcc_cmd)
-  
-  def run_precompiled(self, desired_result, input, limit, compile_result, id):
-    """
-    Run a compile_result from compile() sequentially and return performance
-    """
-    assert compile_result['returncode'] == 0
-
-    try:
-      run_result = self.call_program('./tmp{0}.bin'.format(id))
-      assert run_result['returncode'] == 0
-    finally:
-      self.call_program('rm ./tmp{0}.bin'.format(id))
-
-    return Result(time=run_result['time'])
-
-  def compile_and_run(self, desired_result, input, limit):
-    """
-    Compile and run a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    compile_result = self.compile(cfg, 0)
-    return self.run_precompiled(desired_result, input, limit, compile_result, 0)
-
-if __name__ == '__main__':
-  argparser = opentuner.default_argparser()
-  GccFlagsTuner.main(argparser.parse_args())
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/.gitignore
deleted file mode 100644
index ebdc2a395f4c8b509233d88992512c4cf4ae3364..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-dump-call-graph
-*.callgraph
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# We would prefer a symbolic link, but that does not work on Windows.
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.cpp
deleted file mode 100644
index 6f1c97ffb85967223bf4e2ebc16d8ae0c2bcd02b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.cpp
+++ /dev/null
@@ -1,93 +0,0 @@
-#include "Halide.h"
-#include <stdio.h>
-
-// No-op hooks, defined here as in the sibling apps in this directory
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-using namespace Halide;
-
-int main(int argc, char **argv) {
-  // if (argc < 2) {
-  //     printf("Usage: bilateral_grid <s_sigma>\n");
-  //     // printf("Spatial sigma is a compile-time parameter, please provide it as an argument.\n"
-  //     //        "(llvm's ptx backend doesn't handle integer mods by non-consts yet)\n");
-  //     return 0;
-  // }
-
-    ImageParam input(Float(32), 2);
-    float r_sigma = 0.1f;
-   // int s_sigma = atoi(argv[1]);
-    int s_sigma = 4;
-    Var x("x"), y("y"), z("z"), c("c");
-
-    // Add a boundary condition
-    Func clamped("clamped");
-    clamped(x, y) = input(clamp(x, 0, input.width()-1),
-                          clamp(y, 0, input.height()-1));
-
-    // Construct the bilateral grid
-    RDom r(0, s_sigma, 0, s_sigma);
-    Expr val = clamped(x * s_sigma + r.x - s_sigma/2, y * s_sigma + r.y - s_sigma/2);
-    val = clamp(val, 0.0f, 1.0f);
-    Expr zi = cast<int>(val * (1.0f/r_sigma) + 0.5f);
-    Func grid("grid"), histogram("histogram");
-    histogram(x, y, zi, c) += select(c == 0, val, 1.0f);
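-    // Channel c == 0 accumulates intensity and c == 1 counts samples, so the
-    // final divide interpolated(x, y, 0) / interpolated(x, y, 1) yields a
-    // normalized average (a homogeneous-coordinates trick).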
-
-    // Introduce a dummy function, so we can schedule the histogram within it
-    grid(x, y, z, c) = histogram(x, y, z, c);
-
-    // Blur the grid using a five-tap filter
-    Func blurx("blurx"), blury("blury"), blurz("blurz");
-    blurx(x, y, z, _) = grid(x-2, y, z, _) + grid(x-1, y, z, _)*4 + grid(x, y, z, _)*6 + grid(x+1, y, z, _)*4 + grid(x+2, y, z, _);
-    blury(x, y, z, _) = blurx(x, y-2, z, _) + blurx(x, y-1, z, _)*4 + blurx(x, y, z, _)*6 + blurx(x, y+1, z, _)*4 + blurx(x, y+2, z, _);
-    blurz(x, y, z, _) = blury(x, y, z-2, _) + blury(x, y, z-1, _)*4 + blury(x, y, z, _)*6 + blury(x, y, z+1, _)*4 + blury(x, y, z+2, _);
-
-    // Take trilinear samples to compute the output
-    val = clamp(clamped(x, y), 0.0f, 1.0f);
-    Expr zv = val * (1.0f/r_sigma);
-    zi = cast<int>(zv);
-    Expr zf = zv - zi;
-    Expr xf = cast<float>(x % s_sigma) / s_sigma;
-    Expr yf = cast<float>(y % s_sigma) / s_sigma;
-    Expr xi = x/s_sigma;
-    Expr yi = y/s_sigma;
-    Func interpolated("interpolated");
-    interpolated(x, y, _) =
-        lerp(lerp(lerp(blurz(xi, yi, zi, _), blurz(xi+1, yi, zi, _), xf),
-                  lerp(blurz(xi, yi+1, zi, _), blurz(xi+1, yi+1, zi, _), xf), yf),
-             lerp(lerp(blurz(xi, yi, zi+1, _), blurz(xi+1, yi, zi+1, _), xf),
-                  lerp(blurz(xi, yi+1, zi+1, _), blurz(xi+1, yi+1, zi+1, _), xf), yf), zf);
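-    // xf, yf and zf are the fractional offsets of the output pixel within a
-    // grid cell; they serve as the lerp weights for trilinear interpolation.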
-
-    // Normalize
-    Func bilateral_grid("bilateral_grid");
-    bilateral_grid(x, y) = interpolated(x, y, 0)/interpolated(x, y, 1);
-
-    AUTOTUNE_HOOK(bilateral_grid);
-
-    char *target = getenv("HL_TARGET");
-    if (target && std::string(target) == "ptx") {
-
-        // GPU schedule
-        grid.compute_root().reorder(z, c, x, y).cuda_tile(x, y, 8, 8);
-
-        // Compute the histogram into shared memory before spilling it to global memory
-        histogram.store_at(grid, Var("blockidx")).compute_at(grid, Var("threadidx"));
-
-        blurx.compute_root().cuda_tile(x, y, z, 16, 16, 1);
-        blury.compute_root().cuda_tile(x, y, z, 16, 16, 1);
-        blurz.compute_root().cuda_tile(x, y, z, 8, 8, 4);
-        bilateral_grid.compute_root().cuda_tile(x, y, s_sigma, s_sigma);
-    } else {
-
-        // CPU schedule
-        grid.compute_root().reorder(c, z, x, y).parallel(y);
-        histogram.compute_at(grid, x).unroll(c);
-        blurx.compute_root().parallel(z).vectorize(x, 4);
-        blury.compute_root().parallel(z).vectorize(x, 4);
-        blurz.compute_root().parallel(z).vectorize(x, 4);
-        bilateral_grid.compute_root().parallel(y).vectorize(x, 4);
-    }
-
-    BASELINE_HOOK(bilateral_grid);
-
-   //bilateral_grid.compile_to_file("bilateral_grid", r_sigma, input);
-
-    return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.settings
deleted file mode 100644
index 7b829b779a9f7d05ef7b677ea307430728e29f16..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/bilateral_grid.settings
+++ /dev/null
@@ -1,10 +0,0 @@
-{"input_size": "2048, 2048",
- "functions": [
-  {"name": "clamped", "vars": ["x", "y"], "calls": []},
-  {"name": "histogram", "vars": ["x", "y", "c"], "calls": ["clamped"]},
-  {"name": "grid", "vars": ["x", "y", "z", "c"], "calls": ["histogram"]},
-  {"name": "blurx", "vars": ["x", "y", "z"], "calls": ["grid"]},
-  {"name": "blury", "vars": ["x", "y", "z"], "calls": ["blurx"]},
-  {"name": "blurz", "vars": ["x", "y", "z"], "calls": ["blury"]},
-  {"name": "interpolated", "vars": ["x", "y"], "calls": ["blurz", "clamped"]},
-  {"name": "bilateral_grid", "vars": ["x", "y"], "calls": ["interpolated"]}]}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.cpp
deleted file mode 100644
index 7a38dd45fd8bf48ecc1d7489efe991dd320c0b63..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.cpp
+++ /dev/null
@@ -1,32 +0,0 @@
-#include <Halide.h>
-using namespace Halide;
-
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-int main(int argc, char **argv) {
-
-    ImageParam in_img(UInt(16), 2);
-    Func blur_x("blur_x"), blur_y("blur_y");
-    Var x("x"), y("y"), xi("xi"), yi("yi");
-
-    Func input;
-    input(x,y) = in_img(clamp(x, 1, in_img.width()-1),
-                        clamp(y, 1, in_img.height()-1));
-
-    // The algorithm
-    blur_x(x, y) = (input(x, y) + input(x+1, y) + input(x+2, y))/3;
-    blur_y(x, y) = (blur_x(x, y) + blur_x(x, y+1) + blur_x(x, y+2))/3;
-
-    AUTOTUNE_HOOK(blur_y);
-
-    // How to schedule it
-    blur_y.split(y, y, yi, 8).parallel(y).vectorize(x, 8);
-    blur_x.store_at(blur_y, y).compute_at(blur_y, yi).vectorize(x, 8);  
-
-    BASELINE_HOOK(blur_y);
-
-    blur_y.compile_to_file("halide_blur", in_img); 
-
-    return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.settings
deleted file mode 100644
index af0deeac34966616ff0f0af7a008c0c6f74ef2cb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/halide_blur.settings
+++ /dev/null
@@ -1,4 +0,0 @@
-{"input_size": "4096, 4096",
- "functions": [
-               {"name": "blur_x", "vars": ["x", "y"], "calls": []},
-               {"name": "blur_y", "vars": ["x", "y"], "calls": ["blur_x"]}]}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.cpp
deleted file mode 100644
index 74d141721db7e22667505f35d1c894d081ae064d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.cpp
+++ /dev/null
@@ -1,208 +0,0 @@
-#include "Halide.h"
-
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-using namespace Halide;
-
-#include <iostream>
-#include <limits>
-
-#include <sys/time.h>
-
-using std::vector;
-
-double now() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL);
-    static bool first_call = true;
-    static time_t first_sec = 0;
-    if (first_call) {
-        first_call = false;
-        first_sec = tv.tv_sec;
-    }
-    assert(tv.tv_sec >= first_sec);
-    return (tv.tv_sec - first_sec) + (tv.tv_usec / 1000000.0);
-}
-
-int main(int argc, char **argv) {
-    ImageParam input(Float(32), 3, "input");
-
-    const unsigned int levels = 3;
-
-    Func downsampled[levels];
-    Func downx[levels];
-    Func interpolated[levels];
-    Func upsampled[levels];
-    Func upsampledx[levels];
-    Var x("x"), y("y"), c("c");
-
-    downsampled[0] = Func("downsampled");
-    downx[0] = Func("downx");
-    interpolated[0] = Func("interpolated");
-    upsampled[0] = Func("upsampled");
-    upsampledx[0] = Func("upsampledx");
-
-    Func clamped("clamped");
-    clamped(x, y, c) = input(clamp(x, 0, input.width()-1), clamp(y, 0, input.height()-1), c);
-
-    // This triggers a bug in llvm 3.3 (3.2 and trunk are fine), so we
-    // rewrite it in a way that doesn't trigger the bug. The rewritten
-    // form assumes the input alpha is zero or one.
-    // downsampled[0](x, y, c) = select(c < 3, clamped(x, y, c) * clamped(x, y, 3), clamped(x, y, 3));
-    downsampled[0](x, y, c) = clamped(x, y, c) * clamped(x, y, 3);
-
-    for (unsigned int l = 1; l < levels; ++l) {
-        downx[l] = Func("downx");
-        downsampled[l] = Func("downsampled");
-        downx[l](x, y, c) = (downsampled[l-1](x*2-1, y, c) +
-                             2.0f * downsampled[l-1](x*2, y, c) +
-                             downsampled[l-1](x*2+1, y, c)) * 0.25f;
-        downsampled[l](x, y, c) = (downx[l](x, y*2-1, c) +
-                                   2.0f * downx[l](x, y*2, c) +
-                                   downx[l](x, y*2+1, c)) * 0.25f;
-    }
-    interpolated[levels-1] = Func("interpolated");
-    interpolated[levels-1](x, y, c) = downsampled[levels-1](x, y, c);
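-    // Note: l is unsigned, so this loop counts down from levels-2 and
-    // terminates once l wraps past zero to a huge value >= levels.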
-    for (unsigned int l = levels-2; l < levels; --l) {
-        upsampledx[l] = Func("upsampledx");
-        upsampled[l] = Func("upsampled");
-        interpolated[l] = Func("interpolated");
-        upsampledx[l](x, y, c) = select((x % 2) == 0,
-                                        interpolated[l+1](x/2, y, c),
-                                        0.5f * (interpolated[l+1](x/2, y, c) +
-                                                interpolated[l+1](x/2+1, y, c)));
-        upsampled[l](x, y, c) = select((y % 2) == 0,
-                                       upsampledx[l](x, y/2, c),
-                                       0.5f * (upsampledx[l](x, y/2, c) +
-                                               upsampledx[l](x, y/2+1, c)));
-        interpolated[l](x, y, c) = downsampled[l](x, y, c) + (1.0f - downsampled[l](x, y, 3)) * upsampled[l](x, y, c);
-    }
-
-    Func normalize("normalize");
-    normalize(x, y, c) = interpolated[0](x, y, c) / interpolated[0](x, y, 3);
-
-    Func final("final");
-    final(x, y, c) = normalize(x, y, c);
-
-    AUTOTUNE_HOOK(final);
-
-    int sched;
-    char *target = getenv("HL_TARGET");
-    if (target && std::string(target) == "ptx") {
-        sched = 4;
-    } else {
-        sched = 2;
-    }
-
-    switch (sched) {
-    case 0:
-    {
-        //std::cout << "Flat schedule." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root();
-            interpolated[l].compute_root();
-        }
-        final.compute_root();
-        break;
-    }
-    case 1:
-    {
-        //std::cout << "Flat schedule with vectorization." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root().vectorize(x,4);
-            interpolated[l].compute_root().vectorize(x,4);
-        }
-        final.compute_root();
-        break;
-    }
-    case 2:
-    {
-        Var xi, yi;
-        //std::cout << "Flat schedule with parallelization + vectorization." << std::endl;
-        clamped.compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-        for (unsigned int l = 1; l < levels-1; ++l) {
-            if (l > 0) downsampled[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].unroll(x, 2).unroll(y, 2);
-        }
-        final.reorder(c, x, y).bound(c, 0, 3).parallel(y);
-        final.tile(x, y, xi, yi, 2, 2).unroll(xi).unroll(yi);
-        break;
-    }
-    case 3:
-    {
-        //std::cout << "Flat schedule with vectorization sometimes." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            if (l + 4 < levels) {
-                Var yo,yi;
-                downsampled[l].compute_root().vectorize(x,4);
-                interpolated[l].compute_root().vectorize(x,4);
-            } else {
-                downsampled[l].compute_root();
-                interpolated[l].compute_root();
-            }
-        }
-        final.compute_root();
-        break;
-    }
-    case 4:
-    {
-        //std::cout << "GPU schedule." << std::endl;
-
-        // Some GPUs don't have enough memory to process the entire
-        // image, so we process the image in tiles.
-        Var yo, yi, xo, xi;
-        final.reorder(c, x, y).bound(c, 0, 3).vectorize(x, 4);
-        final.tile(x, y, xo, yo, xi, yi, input.width()/4, input.height()/4);
-        normalize.compute_at(final, xo).reorder(c, x, y).cuda_tile(x, y, 16, 16).unroll(c);
-
-        // Start from level 1 to save memory - level zero will be computed on demand
-        for (unsigned int l = 1; l < levels; ++l) {
-            int tile_size = 32 >> l;
-            if (tile_size < 1) tile_size = 1;
-            if (tile_size > 16) tile_size = 16;
-            downsampled[l].compute_root().cuda_tile(x, y, c, tile_size, tile_size, 4);
-            interpolated[l].compute_at(final, xo).cuda_tile(x, y, c, tile_size, tile_size, 4);
-        }
-
-        break;
-    }
-    default:
-        assert(0 && "No schedule with this number.");
-    }
-
-    BASELINE_HOOK(final);
-
-#if 0
-    // JIT compile the pipeline eagerly, so we don't interfere with timing
-    final.compile_jit();
-
-    // Image<float> in_png = load<float>(argv[1]);
-    Image<float> out(2048, 2048, 3);
-    // assert(in_png.channels() == 4);
-    // input.set(in_png);
-    final.infer_input_bounds(out);
-
-    std::cout << "Running... " << std::endl;
-    double min = std::numeric_limits<double>::infinity();
-    const unsigned int iters = 20;
-
-    for (unsigned int x = 0; x < iters; ++x) {
-        double before = now();
-        final.realize(out);
-        double after = now();
-        double amt = after - before;
-
-        std::cout << "   " << amt * 1000 << std::endl;
-        if (amt < min) min = amt;
-
-    }
-    std::cout << " took " << min * 1000 << " msec." << std::endl;
-
-    // vector<Argument> args;
-    // args.push_back(input);
-    // final.compile_to_assembly("test.s", args);
-    // save(out, argv[2]);
-#endif
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.settings
deleted file mode 100644
index cffd184ed6bc406bb88fb52d1e67ac2349df9532..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simple.settings
+++ /dev/null
@@ -1,185 +0,0 @@
-{
-  "functions": [
-    {
-      "calls": [], 
-      "name": "clamped", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "clamped"
-      ], 
-      "name": "downsampled", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$2"
-      ], 
-      "name": "downsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$3"
-      ], 
-      "name": "downsampled$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled"
-      ], 
-      "name": "downx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$2"
-      ], 
-      "name": "downx$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$3"
-      ], 
-      "name": "interpolated$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$2", 
-        "upsampled$2"
-      ], 
-      "name": "interpolated$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled", 
-        "upsampled$3"
-      ], 
-      "name": "interpolated$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$4"
-      ], 
-      "name": "normalize", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$2"
-      ], 
-      "name": "upsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$3"
-      ], 
-      "name": "upsampled$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$2"
-      ], 
-      "name": "upsampledx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$3"
-      ], 
-      "name": "upsampledx$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "normalize"
-      ], 
-      "name": "final", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }
-  ], 
-  "input_size": "2048, 2048, 3"
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.cpp
deleted file mode 100644
index cf570558360236d16b21379901e40b3ee8481fd4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.cpp
+++ /dev/null
@@ -1,208 +0,0 @@
-#include "Halide.h"
-
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-using namespace Halide;
-
-#include <iostream>
-#include <limits>
-
-#include <sys/time.h>
-
-using std::vector;
-
-double now() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL);
-    static bool first_call = true;
-    static time_t first_sec = 0;
-    if (first_call) {
-        first_call = false;
-        first_sec = tv.tv_sec;
-    }
-    assert(tv.tv_sec >= first_sec);
-    return (tv.tv_sec - first_sec) + (tv.tv_usec / 1000000.0);
-}
-
-int main(int argc, char **argv) {
-    ImageParam input(Float(32), 3, "input");
-
-    const unsigned int levels = 2;
-
-    Func downsampled[levels];
-    Func downx[levels];
-    Func interpolated[levels];
-    Func upsampled[levels];
-    Func upsampledx[levels];
-    Var x("x"), y("y"), c("c");
-
-    downsampled[0] = Func("downsampled");
-    downx[0] = Func("downx");
-    interpolated[0] = Func("interpolated");
-    upsampled[0] = Func("upsampled");
-    upsampledx[0] = Func("upsampledx");
-
-    Func clamped("clamped");
-    clamped(x, y, c) = input(clamp(x, 0, input.width()-1), clamp(y, 0, input.height()-1), c);
-
-    // This triggers a bug in llvm 3.3 (3.2 and trunk are fine), so we
-    // rewrite it in a way that doesn't trigger the bug. The rewritten
-    // form assumes the input alpha is zero or one.
-    // downsampled[0](x, y, c) = select(c < 3, clamped(x, y, c) * clamped(x, y, 3), clamped(x, y, 3));
-    downsampled[0](x, y, c) = clamped(x, y, c) * clamped(x, y, 3);
-
-    for (unsigned int l = 1; l < levels; ++l) {
-        downx[l] = Func("downx");
-        downsampled[l] = Func("downsampled");
-        downx[l](x, y, c) = (downsampled[l-1](x*2-1, y, c) +
-                             2.0f * downsampled[l-1](x*2, y, c) +
-                             downsampled[l-1](x*2+1, y, c)) * 0.25f;
-        downsampled[l](x, y, c) = (downx[l](x, y*2-1, c) +
-                                   2.0f * downx[l](x, y*2, c) +
-                                   downx[l](x, y*2+1, c)) * 0.25f;
-    }
-    interpolated[levels-1] = Func("interpolated");
-    interpolated[levels-1](x, y, c) = downsampled[levels-1](x, y, c);
-    for (unsigned int l = levels-2; l < levels; --l) {
-        upsampledx[l] = Func("upsampledx");
-        upsampled[l] = Func("upsampled");
-        interpolated[l] = Func("interpolated");
-        upsampledx[l](x, y, c) = select((x % 2) == 0,
-                                        interpolated[l+1](x/2, y, c),
-                                        0.5f * (interpolated[l+1](x/2, y, c) +
-                                                interpolated[l+1](x/2+1, y, c)));
-        upsampled[l](x, y, c) = select((y % 2) == 0,
-                                       upsampledx[l](x, y/2, c),
-                                       0.5f * (upsampledx[l](x, y/2, c) +
-                                               upsampledx[l](x, y/2+1, c)));
-        interpolated[l](x, y, c) = downsampled[l](x, y, c) + (1.0f - downsampled[l](x, y, 3)) * upsampled[l](x, y, c);
-    }
-
-    Func normalize("normalize");
-    normalize(x, y, c) = interpolated[0](x, y, c) / interpolated[0](x, y, 3);
-
-    Func final("final");
-    final(x, y, c) = normalize(x, y, c);
-
-    AUTOTUNE_HOOK(final);
-
-    int sched;
-    char *target = getenv("HL_TARGET");
-    if (target && std::string(target) == "ptx") {
-        sched = 4;
-    } else {
-        sched = 2;
-    }
-
-    switch (sched) {
-    case 0:
-    {
-        //std::cout << "Flat schedule." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root();
-            interpolated[l].compute_root();
-        }
-        final.compute_root();
-        break;
-    }
-    case 1:
-    {
-        //std::cout << "Flat schedule with vectorization." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root().vectorize(x,4);
-            interpolated[l].compute_root().vectorize(x,4);
-        }
-        final.compute_root();
-        break;
-    }
-    case 2:
-    {
-        Var xi, yi;
-        //std::cout << "Flat schedule with parallelization + vectorization." << std::endl;
-        clamped.compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-        for (unsigned int l = 1; l < levels-1; ++l) {
-            if (l > 0) downsampled[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].unroll(x, 2).unroll(y, 2);
-        }
-        final.reorder(c, x, y).bound(c, 0, 3).parallel(y);
-        final.tile(x, y, xi, yi, 2, 2).unroll(xi).unroll(yi);
-        break;
-    }
-    case 3:
-    {
-        //std::cout << "Flat schedule with vectorization sometimes." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            if (l + 4 < levels) {
-                Var yo,yi;
-                downsampled[l].compute_root().vectorize(x,4);
-                interpolated[l].compute_root().vectorize(x,4);
-            } else {
-                downsampled[l].compute_root();
-                interpolated[l].compute_root();
-            }
-        }
-        final.compute_root();
-        break;
-    }
-    case 4:
-    {
-        //std::cout << "GPU schedule." << std::endl;
-
-        // Some GPUs don't have enough memory to process the entire
-        // image, so we process the image in tiles.
-        Var yo, yi, xo, xi;
-        final.reorder(c, x, y).bound(c, 0, 3).vectorize(x, 4);
-        final.tile(x, y, xo, yo, xi, yi, input.width()/4, input.height()/4);
-        normalize.compute_at(final, xo).reorder(c, x, y).cuda_tile(x, y, 16, 16).unroll(c);
-
-        // Start from level 1 to save memory - level zero will be computed on demand
-        for (unsigned int l = 1; l < levels; ++l) {
-            int tile_size = 32 >> l;
-            if (tile_size < 1) tile_size = 1;
-            if (tile_size > 16) tile_size = 16;
-            downsampled[l].compute_root().cuda_tile(x, y, c, tile_size, tile_size, 4);
-            interpolated[l].compute_at(final, xo).cuda_tile(x, y, c, tile_size, tile_size, 4);
-        }
-
-        break;
-    }
-    default:
-        assert(0 && "No schedule with this number.");
-    }
-
-    BASELINE_HOOK(final);
-
-#if 0
-    // JIT compile the pipeline eagerly, so we don't interfere with timing
-    final.compile_jit();
-
-    // Image<float> in_png = load<float>(argv[1]);
-    Image<float> out(2048, 2048, 3);
-    // assert(in_png.channels() == 4);
-    // input.set(in_png);
-    final.infer_input_bounds(out);
-
-    std::cout << "Running... " << std::endl;
-    double min = std::numeric_limits<double>::infinity();
-    const unsigned int iters = 20;
-
-    for (unsigned int x = 0; x < iters; ++x) {
-        double before = now();
-        final.realize(out);
-        double after = now();
-        double amt = after - before;
-
-        std::cout << "   " << amt * 1000 << std::endl;
-        if (amt < min) min = amt;
-
-    }
-    std::cout << " took " << min * 1000 << " msec." << std::endl;
-
-    // vector<Argument> args;
-    // args.push_back(input);
-    // final.compile_to_assembly("test.s", args);
-    // save(out, argv[2]);
-#endif
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.settings
deleted file mode 100644
index 5f22d20f5f9cf355bfd07ca408264f73031a14d0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate-simplest.settings
+++ /dev/null
@@ -1,124 +0,0 @@
-{
-  "functions": [
-    {
-      "calls": [], 
-      "name": "clamped", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "clamped"
-      ], 
-      "name": "downsampled", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$2"
-      ], 
-      "name": "downsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled"
-      ], 
-      "name": "downx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$2"
-      ], 
-      "name": "interpolated$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled", 
-        "upsampled$2"
-      ], 
-      "name": "interpolated$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$3"
-      ], 
-      "name": "normalize", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$2"
-      ], 
-      "name": "upsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$2"
-      ], 
-      "name": "upsampledx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "normalize"
-      ], 
-      "name": "final", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }
-  ], 
-  "input_size": "2048, 2048, 3"
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.cpp
deleted file mode 100644
index 1ca4ae5bd352fa524ed6aeafbe5795c913e1f6b8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.cpp
+++ /dev/null
@@ -1,208 +0,0 @@
-#include "Halide.h"
-
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-using namespace Halide;
-
-#include <iostream>
-#include <limits>
-
-#include <sys/time.h>
-
-using std::vector;
-
-double now() {
-    struct timeval tv;
-    gettimeofday(&tv, NULL);
-    static bool first_call = true;
-    static time_t first_sec = 0;
-    if (first_call) {
-        first_call = false;
-        first_sec = tv.tv_sec;
-    }
-    assert(tv.tv_sec >= first_sec);
-    return (tv.tv_sec - first_sec) + (tv.tv_usec / 1000000.0);
-}
-
-int main(int argc, char **argv) {
-    ImageParam input(Float(32), 3, "input");
-
-    const unsigned int levels = 10;
-
-    Func downsampled[levels];
-    Func downx[levels];
-    Func interpolated[levels];
-    Func upsampled[levels];
-    Func upsampledx[levels];
-    Var x("x"), y("y"), c("c");
-
-    downsampled[0] = Func("downsampled");
-    downx[0] = Func("downx");
-    interpolated[0] = Func("interpolated");
-    upsampled[0] = Func("upsampled");
-    upsampledx[0] = Func("upsampledx");
-
-    Func clamped("clamped");
-    clamped(x, y, c) = input(clamp(x, 0, input.width()-1), clamp(y, 0, input.height()-1), c);
-
-    // This triggers a bug in llvm 3.3 (3.2 and trunk are fine), so we
-    // rewrite it in a way that doesn't trigger the bug. The rewritten
-    // form assumes the input alpha is zero or one.
-    // downsampled[0](x, y, c) = select(c < 3, clamped(x, y, c) * clamped(x, y, 3), clamped(x, y, 3));
-    downsampled[0](x, y, c) = clamped(x, y, c) * clamped(x, y, 3);
-
-    for (unsigned int l = 1; l < levels; ++l) {
-        downx[l] = Func("downx");
-        downsampled[l] = Func("downsampled");
-        downx[l](x, y, c) = (downsampled[l-1](x*2-1, y, c) +
-                             2.0f * downsampled[l-1](x*2, y, c) +
-                             downsampled[l-1](x*2+1, y, c)) * 0.25f;
-        downsampled[l](x, y, c) = (downx[l](x, y*2-1, c) +
-                                   2.0f * downx[l](x, y*2, c) +
-                                   downx[l](x, y*2+1, c)) * 0.25f;
-    }
-    interpolated[levels-1] = Func("interpolated");
-    interpolated[levels-1](x, y, c) = downsampled[levels-1](x, y, c);
-    for (unsigned int l = levels-2; l < levels; --l) {
-        upsampledx[l] = Func("upsampledx");
-        upsampled[l] = Func("upsampled");
-        interpolated[l] = Func("interpolated");
-        upsampledx[l](x, y, c) = select((x % 2) == 0,
-                                        interpolated[l+1](x/2, y, c),
-                                        0.5f * (interpolated[l+1](x/2, y, c) +
-                                                interpolated[l+1](x/2+1, y, c)));
-        upsampled[l](x, y, c) = select((y % 2) == 0,
-                                       upsampledx[l](x, y/2, c),
-                                       0.5f * (upsampledx[l](x, y/2, c) +
-                                               upsampledx[l](x, y/2+1, c)));
-        interpolated[l](x, y, c) = downsampled[l](x, y, c) + (1.0f - downsampled[l](x, y, 3)) * upsampled[l](x, y, c);
-    }
-
-    Func normalize("normalize");
-    normalize(x, y, c) = interpolated[0](x, y, c) / interpolated[0](x, y, 3);
-
-    Func final("final");
-    final(x, y, c) = normalize(x, y, c);
-
-    AUTOTUNE_HOOK(final);
-
-    int sched;
-    char *target = getenv("HL_TARGET");
-    if (target && std::string(target) == "ptx") {
-        sched = 4;
-    } else {
-        sched = 2;
-    }
-
-    switch (sched) {
-    case 0:
-    {
-        //std::cout << "Flat schedule." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root();
-            interpolated[l].compute_root();
-        }
-        final.compute_root();
-        break;
-    }
-    case 1:
-    {
-        //std::cout << "Flat schedule with vectorization." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            downsampled[l].compute_root().vectorize(x,4);
-            interpolated[l].compute_root().vectorize(x,4);
-        }
-        final.compute_root();
-        break;
-    }
-    case 2:
-    {
-        Var xi, yi;
-        //std::cout << "Flat schedule with parallelization + vectorization." << std::endl;
-        clamped.compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-        for (unsigned int l = 1; l < levels-1; ++l) {
-            if (l > 0) downsampled[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].compute_root().parallel(y).reorder(c, x, y).reorder_storage(c, x, y).vectorize(c, 4);
-            interpolated[l].unroll(x, 2).unroll(y, 2);
-        }
-        final.reorder(c, x, y).bound(c, 0, 3).parallel(y);
-        final.tile(x, y, xi, yi, 2, 2).unroll(xi).unroll(yi);
-        break;
-    }
-    case 3:
-    {
-        //std::cout << "Flat schedule with vectorization sometimes." << std::endl;
-        for (unsigned int l = 0; l < levels; ++l) {
-            if (l + 4 < levels) {
-                Var yo,yi;
-                downsampled[l].compute_root().vectorize(x,4);
-                interpolated[l].compute_root().vectorize(x,4);
-            } else {
-                downsampled[l].compute_root();
-                interpolated[l].compute_root();
-            }
-        }
-        final.compute_root();
-        break;
-    }
-    case 4:
-    {
-        //std::cout << "GPU schedule." << std::endl;
-
-        // Some GPUs don't have enough memory to process the entire
-        // image, so we process the image in tiles.
-        Var yo, yi, xo, xi;
-        final.reorder(c, x, y).bound(c, 0, 3).vectorize(x, 4);
-        final.tile(x, y, xo, yo, xi, yi, input.width()/4, input.height()/4);
-        normalize.compute_at(final, xo).reorder(c, x, y).cuda_tile(x, y, 16, 16).unroll(c);
-
-        // Start from level 1 to save memory - level zero will be computed on demand
-        for (unsigned int l = 1; l < levels; ++l) {
-            int tile_size = 32 >> l;
-            if (tile_size < 1) tile_size = 1;
-            if (tile_size > 16) tile_size = 16;
-            downsampled[l].compute_root().cuda_tile(x, y, c, tile_size, tile_size, 4);
-            interpolated[l].compute_at(final, xo).cuda_tile(x, y, c, tile_size, tile_size, 4);
-        }
-
-        break;
-    }
-    default:
-        assert(0 && "No schedule with this number.");
-    }
-
-    BASELINE_HOOK(final);
-
-#if 0
-    // JIT compile the pipeline eagerly, so we don't interfere with timing
-    final.compile_jit();
-
-    // Image<float> in_png = load<float>(argv[1]);
-    Image<float> out(2048, 2048, 3);
-    // assert(in_png.channels() == 4);
-    // input.set(in_png);
-    final.infer_input_bounds(out);
-
-    std::cout << "Running... " << std::endl;
-    double min = std::numeric_limits<double>::infinity();
-    const unsigned int iters = 20;
-
-    for (unsigned int x = 0; x < iters; ++x) {
-        double before = now();
-        final.realize(out);
-        double after = now();
-        double amt = after - before;
-
-        std::cout << "   " << amt * 1000 << std::endl;
-        if (amt < min) min = amt;
-
-    }
-    std::cout << " took " << min * 1000 << " msec." << std::endl;
-
-    // vector<Argument> args;
-    // args.push_back(input);
-    // final.compile_to_assembly("test.s", args);
-    // save(out, argv[2]);
-#endif
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.settings
deleted file mode 100644
index 3a51d8062674581182fb204ddb943f85cd3b4de4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/interpolate.settings
+++ /dev/null
@@ -1,612 +0,0 @@
-{
-  "functions": [
-    {
-      "calls": [], 
-      "name": "clamped", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "clamped"
-      ], 
-      "name": "downsampled", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$10"
-      ], 
-      "name": "downsampled$10", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$2"
-      ], 
-      "name": "downsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$3"
-      ], 
-      "name": "downsampled$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$4"
-      ], 
-      "name": "downsampled$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$5"
-      ], 
-      "name": "downsampled$5", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$6"
-      ], 
-      "name": "downsampled$6", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$7"
-      ], 
-      "name": "downsampled$7", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$8"
-      ], 
-      "name": "downsampled$8", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downx$9"
-      ], 
-      "name": "downsampled$9", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$9"
-      ], 
-      "name": "downx$10", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled"
-      ], 
-      "name": "downx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$2"
-      ], 
-      "name": "downx$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$3"
-      ], 
-      "name": "downx$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$4"
-      ], 
-      "name": "downx$5", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$5"
-      ], 
-      "name": "downx$6", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$6"
-      ], 
-      "name": "downx$7", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$7"
-      ], 
-      "name": "downx$8", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$8"
-      ], 
-      "name": "downx$9", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$2", 
-        "upsampled$9"
-      ], 
-      "name": "interpolated$10", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled", 
-        "upsampled$10"
-      ], 
-      "name": "interpolated$11", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$10"
-      ], 
-      "name": "interpolated$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$9", 
-        "upsampled$2"
-      ], 
-      "name": "interpolated$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$8", 
-        "upsampled$3"
-      ], 
-      "name": "interpolated$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$7", 
-        "upsampled$4"
-      ], 
-      "name": "interpolated$5", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$6", 
-        "upsampled$5"
-      ], 
-      "name": "interpolated$6", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$5", 
-        "upsampled$6"
-      ], 
-      "name": "interpolated$7", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$4", 
-        "upsampled$7"
-      ], 
-      "name": "interpolated$8", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "downsampled$3", 
-        "upsampled$8"
-      ], 
-      "name": "interpolated$9", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$11"
-      ], 
-      "name": "normalize", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$10"
-      ], 
-      "name": "upsampled$10", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$2"
-      ], 
-      "name": "upsampled$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$3"
-      ], 
-      "name": "upsampled$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$4"
-      ], 
-      "name": "upsampled$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$5"
-      ], 
-      "name": "upsampled$5", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$6"
-      ], 
-      "name": "upsampled$6", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$7"
-      ], 
-      "name": "upsampled$7", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$8"
-      ], 
-      "name": "upsampled$8", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "upsampledx$9"
-      ], 
-      "name": "upsampled$9", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$10"
-      ], 
-      "name": "upsampledx$10", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$2"
-      ], 
-      "name": "upsampledx$2", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$3"
-      ], 
-      "name": "upsampledx$3", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$4"
-      ], 
-      "name": "upsampledx$4", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$5"
-      ], 
-      "name": "upsampledx$5", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$6"
-      ], 
-      "name": "upsampledx$6", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$7"
-      ], 
-      "name": "upsampledx$7", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$8"
-      ], 
-      "name": "upsampledx$8", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "interpolated$9"
-      ], 
-      "name": "upsampledx$9", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }, 
-    {
-      "calls": [
-        "normalize"
-      ], 
-      "name": "final", 
-      "update_calls": [], 
-      "vars": [
-        "x", 
-        "y", 
-        "c"
-      ]
-    }
-  ], 
-  "input_size": "1024, 1024, 3"
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.cpp
deleted file mode 100644
index e2cb008790ac0161a2365388744cb9b482a6d7b8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.cpp
+++ /dev/null
@@ -1,93 +0,0 @@
-#include "Halide.h"
-
-#define AUTOTUNE_HOOK(x)
-#define BASELINE_HOOK(x)
-
-using namespace Halide;
-
-Var x("x"), y("y"), c("c");
-
-Func haar_x(Func in) {
-    Func out;
-    out(x, y, c) = select(c == 0, 
-                          (in(2*x, y) + in(2*x+1, y)),
-                          (in(2*x, y) - in(2*x+1, y)))/2;
-    out.unroll(c, 2);
-    return out;
-}
-
-Func inverse_haar_x(Func in) {
-    Func out;
-    out(x, y) = select(x%2 == 0, 
-                       in(x/2, y, 0) + in(x/2, y, 1),
-                       in(x/2, y, 0) - in(x/2, y, 1));
-    out.unroll(x, 2);
-    return out;
-}
-
-
-const float D0 = 0.4829629131445341f;
-const float D1 = 0.83651630373780772f;
-const float D2 = 0.22414386804201339f;
-const float D3 = -0.12940952255126034f;
-
-/*
-const float D0 = 0.34150635f;
-const float D1 = 0.59150635f;
-const float D2 = 0.15849365f;
-const float D3 = -0.1830127f;
-*/
-
-Func daubechies_x(Func in) {
-    Func out;
-    out(x, y, c) = select(c == 0, 
-                          D0*in(2*x-1, y) + D1*in(2*x, y) + D2*in(2*x+1, y) + D3*in(2*x+2, y),
-                          D3*in(2*x-1, y) - D2*in(2*x, y) + D1*in(2*x+1, y) - D0*in(2*x+2, y));
-    // out.unroll(c, 2);
-    return out;
-}
-
-Func inverse_daubechies_x(Func in) {
-    Func out("inv_daub_x");
-    out(x, y) = select(x%2 == 0,
-                       D2*in(x/2, y, 0) + D1*in(x/2, y, 1) + D0*in(x/2+1, y, 0) + D3*in(x/2+1, y, 1),
-                       D3*in(x/2, y, 0) - D0*in(x/2, y, 1) + D1*in(x/2+1, y, 0) - D2*in(x/2+1, y, 1));
-    // out.unroll(x, 2);
-    return out;
-}
-
-int main(int argc, char **argv) {
-
-    ImageParam image(Float(32), 2);
-    ImageParam wavelet(Float(32), 3);
-
-    // Add a boundary condition for daubechies
-    Func clamped;
-    clamped(x, y) = image(clamp(x, 0, image.width()-1),
-                          clamp(y, 0, image.height()-1));
-    Func wavelet_clamped("wavelet_clamped");
-    wavelet_clamped(x, y, c) = wavelet(clamp(x, 0, wavelet.width()-1),
-                                       clamp(y, 0, wavelet.height()-1), c);
-
-
-  // Func inv_haar_x = inverse_haar_x(wavelet_clamped);
-  // inv_haar_x.compile_to_file("inverse_haar_x", wavelet);
-
-  // Func for_haar_x = haar_x(clamped);
-  // for_haar_x.compile_to_file("haar_x", image);
-
-    Func inv_daub_x = inverse_daubechies_x(wavelet_clamped);
-    //inv_daub_x.compile_to_file("inverse_daubechies_x", wavelet);
-
-    AUTOTUNE_HOOK(inv_daub_x);
-    inv_daub_x.unroll(x, 2).vectorize(x, 8).parallel(y);
-    BASELINE_HOOK(inv_daub_x);
-
-  // Func for_daub_x = daubechies_x(clamped);
-    //for_daub_x.compile_to_file("daubechies_x", image);
-
-    return 0;
-}
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.settings b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.settings
deleted file mode 100644
index 6fbb5c4006dd77fee03f8635de2b22079eb0a908..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/apps/wavelet.settings
+++ /dev/null
@@ -1,4 +0,0 @@
-{"input_size": "2048, 2048",
- "functions": [
-    {"name": "wavelet_clamped", "vars": ["x", "y", "c"], "calls": []},
-    {"name": "inv_daub_x", "vars": ["x", "y"], "calls": ["wavelet_clamped"]}]}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/halidetuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/halidetuner.py
deleted file mode 100755
index 08e6732575557e41736fb71a321ee7190e181e7e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/halidetuner.py
+++ /dev/null
@@ -1,682 +0,0 @@
-#!/usr/bin/env python
-# coding: utf-8
-#
-# Example of synthesizing Halide schedules using OpenTuner.  This program
-# expects a compiled version of Halide to exist at ~/Halide or at the location
-# specified by --halide-dir.
-#
-# Halide programs must be modified by:
-#  1) Inserting AUTOTUNE_HOOK(Func) directly after the algorithm definition
-#     in main()
-#  2) Creating a settings file that describes the functions and variables
-#     (see apps/halide_blur.settings for an example)
-#
-# Halide can be found here: https://github.com/halide/Halide
-#
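-# For reference, a settings file for a two-stage pipeline looks roughly like
-# this (cf. apps/wavelet.settings; the function names here are illustrative):
-#
-#   {"input_size": "2048, 2048",
-#    "functions": [
-#       {"name": "producer", "vars": ["x", "y"], "calls": []},
-#       {"name": "consumer", "vars": ["x", "y"], "calls": ["producer"]}]}
-#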
-
-import adddeps  # fix sys.path
-
-import argparse
-import collections
-import hashlib
-import json
-import logging
-import math
-import os
-import re
-import subprocess
-import tempfile
-import textwrap
-from cStringIO import StringIO
-from fn import _
-from pprint import pprint
-
-import opentuner
-from opentuner.search.manipulator import ConfigurationManipulator
-from opentuner.search.manipulator import PowerOfTwoParameter
-from opentuner.search.manipulator import PermutationParameter
-from opentuner.search.manipulator import BooleanParameter
-from opentuner.search.manipulator import ScheduleParameter
-
-
-COMPILE_CMD = (
-  '{args.cxx} "{cpp}" -o "{bin}" -I "{args.halide_dir}/include" '
-  '"{args.halide_dir}/bin/$BUILD_PREFIX/libHalide.a" -ldl -lcurses -lpthread {args.cxxflags} '
-  '-DAUTOTUNE_N="{args.input_size}" -DAUTOTUNE_TRIALS={args.trials} '
-  '-DAUTOTUNE_LIMIT={limit} -fno-rtti')
-
-log = logging.getLogger('halide')
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-parser.add_argument('source', help='Halide source file annotated with '
-                                   'AUTOTUNE_HOOK')
-parser.add_argument('--halide-dir', default=os.path.expanduser('~/Halide'),
-                    help='Installation directory for Halide')
-parser.add_argument('--input-size',
-                    help='Input size to test with')
-parser.add_argument('--trials', default=3, type=int,
-                    help='Number of times to test each schedule')
-parser.add_argument('--nesting', default=2, type=int,
-                    help='Maximum depth for generated loops')
-parser.add_argument('--max-split-factor', default=8, type=int,
-                    help='The largest value a single split() can add')
-parser.add_argument('--compile-command', default=COMPILE_CMD,
-                    help='How to compile generated C++ code')
-parser.add_argument('--cxx', default='c++',
-                    help='C++ compiler to use (e.g., g++ or clang++)')
-parser.add_argument('--cxxflags', default='',
-                    help='Extra flags to the C++ compiler')
-parser.add_argument('--tmp-dir',
-                    default=('/run/shm' if os.access('/run/shm', os.W_OK)
-                             else '/tmp'),
-                    help='Where to store generated tests')
-parser.add_argument('--settings-file',
-                    help='Override location of json encoded settings')
-parser.add_argument('--debug-error',
-                    help='Stop on errors matching a given string')
-parser.add_argument('--limit', type=float, default=30,
-                    help='Kill compile + runs taking too long (seconds)')
-parser.add_argument('--memory-limit', type=int, default=1024 ** 3,
-                    help='Set memory ulimit on unix based systems')
-parser.add_argument('--enable-unroll', action='store_true',
-                    help='Enable .unroll(...) generation')
-parser.add_argument('--enable-store-at', action='store_true',
-                    help='Enable .store_at(...) generation')
-parser.add_argument('--gated-store-reorder', action='store_true',
-                    help='Only reorder storage if a special parameter is given')
-group = parser.add_mutually_exclusive_group()
-group.add_argument('--random-test', action='store_true',
-                   help='Generate a random configuration and run it')
-group.add_argument('--random-source', action='store_true',
-                   help='Generate a random configuration and print its source')
-group.add_argument('--make-settings-file', action='store_true',
-                   help='Create a skeleton settings file from call graph')
-
-
-# class HalideRandomConfig(opentuner.search.technique.SearchTechnique):
-#   def desired_configuration(self):
-#     '''
-#     inject random configs with no compute_at() calls to kickstart the search process
-#     '''
-#     cfg = self.manipulator.random()
-#     for k in cfg.keys():
-#       if re.match('.*_compute_level', k):
-#         cfg[k] = LoopLevel.INLINE
-#     return cfg
-#
-# technique.register(bandittechniques.AUCBanditMetaTechnique([
-#         HalideRandomConfig(),
-#         differentialevolution.DifferentialEvolutionAlt(),
-#         evolutionarytechniques.UniformGreedyMutation(),
-#         evolutionarytechniques.NormalGreedyMutation(mutation_rate=0.3),
-#       ], name = "HalideMetaTechnique"))
-
-
-class HalideTuner(opentuner.measurement.MeasurementInterface):
-  def __init__(self, args):
-    # args.technique = ['HalideMetaTechnique']
-    super(HalideTuner, self).__init__(args, program_name=args.source)
-    timing_prefix = open(os.path.join(os.path.dirname(__file__),
-                                      'timing_prefix.h')).read()
-    self.template = timing_prefix + open(args.source).read()
-    self.min_collection_cost = float('inf')
-    if not args.settings_file:
-      args.settings_file = os.path.splitext(args.source)[0] + '.settings'
-    if not args.make_settings_file:
-      with open(args.settings_file) as fd:
-        self.settings = json.load(fd)
-      self.post_dominators = post_dominators(self.settings)
-      if not args.input_size:
-        args.input_size = self.settings['input_size']
-    else:
-      self.settings = None
-      self.post_dominators = None
-      args.input_size = '1, 1'
-    # set "program_version" based on hash of halidetuner.py, program source
-    h = hashlib.md5()
-    #with open(__file__) as src:
-    #  h.update(src.read())
-    with open(args.source) as src:
-      h.update(src.read())
-    self._version = h.hexdigest()
-
-  def compute_order_parameter(self, func):
-    name = func['name']
-    schedule_vars = []
-    schedule_deps = dict()
-    for var in func['vars']:
-      schedule_vars.append((var, 0))
-      for i in xrange(1, self.args.nesting):
-        schedule_vars.append((var, i))
-        schedule_deps[(var, i - 1)] = [(var, i)]
-    return ScheduleParameter('{0}_compute_order'.format(name), schedule_vars,
-                             schedule_deps)
-
-  def manipulator(self):
-    """
-    The definition of the manipulator is meant to mimic the Halide::Schedule
-    data structure and defines the configuration space to search
-    """
-    manipulator = HalideConfigurationManipulator(self)
-    manipulator.add_parameter(HalideComputeAtScheduleParameter(
-      'schedule', self.args, self.settings['functions'],
-      self.post_dominators))
-    for func in self.settings['functions']:
-      name = func['name']
-      manipulator.add_parameter(PermutationParameter(
-        '{0}_store_order'.format(name), func['vars']))
-      manipulator.add_parameter(
-        BooleanParameter('{0}_store_order_enabled'.format(name)))
-      manipulator.add_parameter(self.compute_order_parameter(func))
-      for var in func['vars']:
-        manipulator.add_parameter(PowerOfTwoParameter(
-          '{0}_vectorize'.format(name), 1, self.args.max_split_factor))
-        manipulator.add_parameter(PowerOfTwoParameter(
-          '{0}_unroll'.format(name), 1, self.args.max_split_factor))
-        manipulator.add_parameter(BooleanParameter(
-          '{0}_parallel'.format(name)))
-        for nesting in xrange(1, self.args.nesting):
-          manipulator.add_parameter(PowerOfTwoParameter(
-            '{0}_splitfactor_{1}_{2}'.format(name, nesting, var),
-            1, self.args.max_split_factor))
-
-    return manipulator
-
-  def cfg_to_schedule(self, cfg):
-    """
-    Produce a Halide schedule from a configuration dictionary
-    """
-    o = StringIO()
-    cnt = 0
-    temp_vars = list()
-    schedule = ComputeAtStoreAtParser(cfg['schedule'], self.post_dominators)
-    compute_at = schedule.compute_at
-    store_at = schedule.store_at
-
-    # build list of all used variable names
-    var_names = dict()
-    var_name_order = dict()
-    for func in self.settings['functions']:
-      name = func['name']
-      compute_order = cfg['{0}_compute_order'.format(name)]
-      for var in func['vars']:
-        var_names[(name, var, 0)] = var
-        for nesting in xrange(1, self.args.nesting):
-          split_factor = cfg.get('{0}_splitfactor_{1}_{2}'.format(
-            name, nesting, var), 0)
-          if split_factor > 1 and (name, var, nesting - 1) in var_names:
-            var_names[(name, var, nesting)] = '_{var}{cnt}'.format(
-              func=name, var=var, nesting=nesting, cnt=cnt)
-            temp_vars.append(var_names[(name, var, nesting)])
-          cnt += 1
-      var_name_order[name] = [var_names[(name, v, n)] for v, n in compute_order
-                              if (name, v, n) in var_names]
-
-    # set a schedule for each function
-    for func in self.settings['functions']:
-      name = func['name']
-      inner_var_name = var_name_order[name][-1] # innermost variable in the reordered list for this func
-      vectorize = cfg['{0}_vectorize'.format(name)]
-      if self.args.enable_unroll:
-        unroll = cfg['{0}_unroll'.format(name)]
-      else:
-        unroll = 1
-
-      print >> o, 'Halide::Func(funcs["%s"])' % name
-
-      for var in func['vars']:
-        # handle all splits
-        for nesting in xrange(1, self.args.nesting):
-          split_factor = cfg.get('{0}_splitfactor_{1}_{2}'.format(
-            name, nesting, var), 0)
-          if split_factor <= 1:
-            break
-
-          for nesting2 in xrange(nesting + 1, self.args.nesting):
-            split_factor2 = cfg.get('{0}_splitfactor_{1}_{2}'.format(
-              name, nesting2, var), 0)
-            if split_factor2 <= 1:
-              break
-            split_factor *= split_factor2
-          var_name = var_names[(name, var, nesting)]
-          last_var_name = var_names[(name, var, nesting - 1)]
-          
-          # apply unroll, vectorize factors to all surrounding splits iff we're the innermost var
-          if var_name == inner_var_name:
-            split_factor *= unroll
-            split_factor *= vectorize
-
-          print >> o, '.split({0}, {0}, {1}, {2})'.format(
-            last_var_name, var_name, split_factor)
-
-      # drop unused variables and truncate (Halide supports only 10 reorders)
-      if len(var_name_order[name]) > 1:
-        print >> o, '.reorder({0})'.format(
-            ', '.join(reversed(var_name_order[name][:10])))
-
-      # reorder_storage
-      store_order_enabled = cfg['{0}_store_order_enabled'.format(name)]
-      if store_order_enabled or not self.args.gated_store_reorder:
-        store_order = cfg['{0}_store_order'.format(name)]
-        if len(store_order) > 1:
-          print >> o, '.reorder_storage({0})'.format(', '.join(store_order))
-
-      if unroll > 1:
-        # apply unrolling to innermost var
-        print >> o, '.unroll({0}, {1})'.format(
-          var_name_order[name][-1], unroll * vectorize)
-
-      if vectorize > 1:
-        # apply vectorization to innermost var
-        print >> o, '.vectorize({0}, {1})'.format(
-          var_name_order[name][-1], vectorize)
-      
-      # compute_at(not root)
-      if (compute_at[name] is not None and
-              len(var_name_order[compute_at[name][0]]) >= compute_at[name][1]):
-        at_func, at_idx = compute_at[name]
-        try:
-          at_var = var_name_order[at_func][-at_idx]
-          print >> o, '.compute_at(Halide::Func(funcs["{0}"]), {1})'.format(at_func, at_var)
-          if not self.args.enable_store_at:
-            pass  # disabled
-          elif store_at[name] is None:
-            print >> o, '.store_root()'
-          elif store_at[name] != compute_at[name]:
-            at_func, at_idx = store_at[name]
-            at_var = var_name_order[at_func][-at_idx]
-            print >> o, '.store_at(Halide::Func(funcs["{0}"]), {1})'.format(at_func, at_var)
-        except IndexError:
-          # this is expected when at_idx is too large
-          # TODO: implement a cleaner fix
-          pass
-      # compute_root
-      else:
-        parallel = cfg['{0}_parallel'.format(name)]
-        if parallel:
-          # only apply parallelism to outermost var of root funcs
-          print >> o, '.parallel({0})'.format(var_name_order[name][0])
-        print >> o, '.compute_root()'
-
-      print >> o, ';'
-
-    if temp_vars:
-      return 'Halide::Var {0};\n{1}'.format(
-        ', '.join(temp_vars), o.getvalue())
-    else:
-      return o.getvalue()
-
-  def schedule_to_source(self, schedule):
-    """
-    Generate a temporary Halide cpp file with schedule inserted
-    """
-
-    def repl_autotune_hook(match):
-      tmpl = '''
-    {
-        std::map<std::string, Halide::Internal::Function> funcs = Halide::Internal::find_transitive_calls((%(func)s).function());
-
-        %(sched)s
-
-        _autotune_timing_stub(%(func)s);
-    }'''
-      return tmpl % {"sched": schedule.replace('\n', '\n        '), "func": match.group(1)}
-
-    source = re.sub(r'\n\s*AUTOTUNE_HOOK\(\s*([a-zA-Z0-9_]+)\s*\)',
-                    repl_autotune_hook, self.template)
-    return source
-
-  def run_schedule(self, schedule, limit):
-    """
-    Generate a temporary Halide cpp file with schedule inserted and run it
-    with our timing harness found in timing_prefix.h.
-    """
-    return self.run_source(self.schedule_to_source(schedule), limit)
-
-  def run_baseline(self):
-    """
-    Generate a temporary Halide cpp file with the timing harness inserted at
-    the BASELINE_HOOK (no schedule is applied) and run it.
-    """
-
-    def repl_autotune_hook(match):
-      return '\n\n_autotune_timing_stub(%s);' % match.group(1)
-
-    source = re.sub(r'\n\s*BASELINE_HOOK\(\s*([a-zA-Z0-9_]+)\s*\)',
-                    repl_autotune_hook, self.template)
-    return self.run_source(source)
-
-  def run_source(self, source, limit=0, extra_args=''):
-    cmd = ''
-    with tempfile.NamedTemporaryFile(suffix='.cpp', prefix='halide',
-                                     dir=self.args.tmp_dir) as cppfile:
-      cppfile.write(source)
-      cppfile.flush()
-      # binfile = os.path.splitext(cppfile.name)[0] + '.bin'
-      # binfile = '/tmp/halide.bin'
-      binfile = ''
-      with tempfile.NamedTemporaryFile(suffix='.bin', prefix='halide',
-                                               dir=self.args.tmp_dir, delete=False) as binfiletmp:
-
-        binfile = binfiletmp.name # unique temp file to allow multiple concurrent tuner runs
-      assert(binfile)
-      cmd = self.args.compile_command.format(
-        cpp=cppfile.name, bin=binfile, args=self.args,
-        limit=math.ceil(limit) if limit < float('inf') else 0)
-      cmd += ' ' + extra_args
-      compile_result = self.call_program(cmd, limit=self.args.limit,
-                                         memory_limit=self.args.memory_limit)
-      if compile_result['returncode'] != 0:
-        log.error('compile failed: %s', compile_result)
-        return None
-
-    try:
-      result = self.call_program(binfile,
-                                 limit=self.args.limit,
-                                 memory_limit=self.args.memory_limit)
-      stdout = result['stdout']
-      stderr = result['stderr']
-      returncode = result['returncode']
-
-      if result['timeout']:
-        log.info('compiler timeout %d (%.2f+%.0f cost)', self.args.limit,
-                 compile_result['time'], self.args.limit)
-        return float('inf')
-      elif returncode == 142 or returncode == -14:
-        log.info('program timeout %d (%.2f+%.2f cost)', math.ceil(limit),
-                 compile_result['time'], result['time'])
-        return None
-      elif returncode != 0:
-        log.error('invalid schedule (returncode=%d): %s', returncode,
-                  stderr.strip())
-        with tempfile.NamedTemporaryFile(suffix='.cpp', prefix='halide-error',
-                                         dir=self.args.tmp_dir, delete=False) as errfile:
-          errfile.write(source)
-          log.error('failed schedule logged to %s.\ncompile as `%s`.', errfile.name, cmd)
-        if self.args.debug_error is not None and (
-            self.args.debug_error in stderr
-        or self.args.debug_error == ""):
-          self.debug_schedule('/tmp/halideerror.cpp', source)
-        return None
-      else:
-        try:
-          time = json.loads(stdout)['time']
-        except:
-          log.exception('error parsing output: %s', result)
-          return None
-        log.info('success: %.4f (collection cost %.2f + %.2f)',
-                 time, compile_result['time'], result['time'])
-        self.min_collection_cost = min(
-          self.min_collection_cost, result['time'])
-        return time
-    finally:
-      os.unlink(binfile)
-
-  def run_cfg(self, cfg, limit=0):
-    try:
-      schedule = self.cfg_to_schedule(cfg)
-    except:
-      log.exception('error generating schedule')
-      return None
-    return self.run_schedule(schedule, limit)
-
-  def run(self, desired_result, input, limit):
-    time = self.run_cfg(desired_result.configuration.data, limit)
-    if time is not None:
-      return opentuner.resultsdb.models.Result(time=time)
-    else:
-      return opentuner.resultsdb.models.Result(state='ERROR',
-                                               time=float('inf'))
-
-  def save_final_config(self, configuration):
-    """called at the end of tuning"""
-    print 'Final Configuration:'
-    print self.cfg_to_schedule(configuration.data)
-
-  def debug_log_schedule(self, filename, source):
-    open(filename, 'w').write(source)
-    print 'offending schedule written to {0}'.format(filename)
-
-  def debug_schedule(self, filename, source):
-    self.debug_log_schedule(filename, source)
-    raw_input('press ENTER to continue')
-
-  def make_settings_file(self):
-    dump_call_graph_dir = os.path.join(os.path.dirname(__file__),
-                                       'dump-call-graph')
-    if not os.path.isdir(dump_call_graph_dir):
-      subprocess.check_call(['git', 'clone',
-                             'http://github.com/halide/dump-call-graph.git'])
-      assert os.path.isdir(dump_call_graph_dir)
-
-    dump_call_graph_cpp = os.path.join(dump_call_graph_dir, 'DumpCallGraph.cpp')
-    callgraph_file = self.args.settings_file + '.callgraph'
-
-    def repl_autotune_hook(match):
-      return r'''dump_call_graph("%s", %s);
-                 printf("{\"time\": 0}\n");
-                 exit(0);''' % (callgraph_file, match.group(1))
-
-    source = re.sub(r'\n\s*AUTOTUNE_HOOK\(\s*([a-zA-Z0-9_]+)\s*\)',
-                    repl_autotune_hook, self.template)
-    # TODO: BUG! - this only works correctly if given an absolute path to the
-    # program (or explicit settings file). Otherwise it generates the callgraph
-    # in a tmp dir somewhere and fails to find it in a local path here.
-    source = open(dump_call_graph_cpp).read() + source
-    self.run_source(source, extra_args='-I{0}'.format(dump_call_graph_dir))
-    callgraph = json.load(open(callgraph_file))
-    settings = {'input_size': '1024, 1024', 'functions': callgraph}
-    json.dump(settings, open(self.args.settings_file, 'w'), sort_keys=True,
-              indent=2)
-    print textwrap.dedent('''
-
-      {0} has been generated based on call graph of program.
-
-      This file likely needs some manual tweaks in order to work correctly.
-      The input size should be changed to have the right number of dimensions.
-      Any differences between variable names and function names must be
-      reconciled manually.  Some temporary variables not in the source code
-      need to be manually removed.
-
-    '''.format(self.args.settings_file))
-
-
-class ComputeAtStoreAtParser(object):
-  """
-  A recursive descent parser that forces proper loop nesting and enforces
-  post-dominator scheduling constraints.
-
-  For each function, the input will have tokens like:
-  ('foo', 's') = store_at location for foo
-  ('foo', '2'), ('foo', '1') = opening the loop nests for foo,
-                               the inner 2 variables
-  ('foo', 'c') = the computation of foo, and closing all loop nests
-
-  The order of these tokens defines a loop nest tree, which we reconstruct.
-  """
-
-  def __init__(self, tokens, post_dominators):
-    self.tokens = list(tokens)  # input, processed back to front
-    self.post_dominators = post_dominators
-    self.compute_at = dict()
-    self.store_at = dict()
-    self.process_root()
-
-  def process_root(self):
-    old_len = len(self.tokens)
-    out = []
-    while self.tokens:
-      if self.tokens[-1][1] == 's':
-        # store at root
-        self.store_at[self.tokens[-1][0]] = None
-        out.append(self.tokens.pop())
-      else:
-        self.process_loopnest(out, [])
-    self.tokens = list(reversed(out))
-    assert old_len == len(self.tokens)
-
-  def process_loopnest(self, out, stack):
-    func, idx = self.tokens[-1]
-    out.append(self.tokens.pop())
-    if idx != 'c':
-      raise Exception('Invalid schedule')
-
-    self.compute_at[func] = None
-    for targ_func, targ_idx in reversed(stack):
-      if targ_func in self.post_dominators[func]:
-        self.compute_at[func] = (targ_func, targ_idx)
-        break
-
-    close_tokens = [(f, i) for f, i in self.tokens if f == func and i != 's']
-    while close_tokens:
-      if self.tokens[-1] == close_tokens[-1]:
-        # proper nesting
-        close_tokens.pop()
-        out.append(self.tokens.pop())
-      elif self.tokens[-1][1] == 'c':
-        self.process_loopnest(out, stack + close_tokens[-1:])
-      elif self.tokens[-1][1] == 's':
-        # self.tokens[-1] is computed at this level
-        if func in self.post_dominators[self.tokens[-1][0]]:
-          self.store_at[self.tokens[-1][0]] = close_tokens[-1]
-        else:
-          self.store_at[self.tokens[-1][0]] = None
-        out.append(self.tokens.pop())
-      else:
-        # improper nesting, just close the loop and search/delete close_tokens
-        out.extend(reversed(close_tokens))
-        self.tokens = [x for x in self.tokens if x not in close_tokens]
-        break
-
-
-class HalideConfigurationManipulator(ConfigurationManipulator):
-  def __init__(self, halide_tuner):
-    super(HalideConfigurationManipulator, self).__init__()
-    self.halide_tuner = halide_tuner
-
-  def hash_config(self, config):
-    """
-    Multiple configs can lead to the same schedule, so we provide a custom
-    hash function that hashes the resulting schedule instead of the raw config.
-    This will lead to fewer duplicate tests.
-    """
-    self.normalize(config)
-    try:
-      schedule = self.halide_tuner.cfg_to_schedule(config)
-      return hashlib.sha256(schedule).hexdigest()
-    except:
-      log.warning('error hashing config', exc_info=True)
-      return super(HalideConfigurationManipulator, self).hash_config(config)
-
-
-class HalideComputeAtScheduleParameter(ScheduleParameter):
-  def __init__(self, name, args, functions, post_dominators):
-    """
-    Custom ScheduleParameter that normalizes using ComputeAtStoreAtParser
-    """
-    super(HalideComputeAtScheduleParameter, self).__init__(
-      name, *self.gen_nodes_deps(args, functions))
-    self.post_dominators = post_dominators
-
-  def gen_nodes_deps(self, args, functions):
-    """
-    Compute the list of nodes and point-to-point deps to provide to base class
-    """
-    nodes = list()
-    deps = collections.defaultdict(list)
-    for func in functions:
-      last = None
-      for idx in reversed(['c'] + # 'c' = compute location (and close loops)
-          range(1, len(func['vars']) * args.nesting + 1) +
-          ['s']):  # 's' = storage location
-        name = (func['name'], idx)
-        if last is not None:
-          # variables must go in order
-          deps[last].append(name)
-        last = name
-        nodes.append(name)
-        if idx == 'c':
-          # computes must follow call graph order
-          for callee in func['calls']:
-            deps[(callee, 'c')].append(name)
-    return nodes, deps
-
-  def normalize(self, cfg):
-    """
-    First enforce basic point-to-point deps (in base class), then call
-    ComputeAtStoreAtParser to normalize schedule.
-    """
-    super(HalideComputeAtScheduleParameter, self).normalize(cfg)
-    cfg[self.name] = ComputeAtStoreAtParser(cfg[self.name],
-                                            self.post_dominators).tokens
-
-
-def post_dominators(settings):
-  """
-  Compute the post-dominator tree using the textbook iterative algorithm for
-  the call graph defined in settings
-  """
-  functions = [f['name'] for f in settings['functions']]
-  calls = dict([(f['name'], set(f['calls'])) for f in settings['functions']])
-  inverse_calls = collections.defaultdict(set)
-  for k, callees in calls.items():
-    for v in callees:
-      inverse_calls[v].add(k)
-  dom = {functions[-1]: set([functions[-1]])}
-  for f in functions[:-1]:
-    dom[f] = set(functions)
-  change = True
-  while change:
-    change = False
-    for f in functions[:-1]:
-      old = dom[f]
-      dom[f] = set([f]) | reduce(
-        _ & _, [dom[c] for c in inverse_calls[f]], set(functions))
-      if old != dom[f]:
-        change = True
-  return dom
-
-
-def random_test(args):
-  """
-  Generate and run a random schedule
-  """
-
-  opentuner.tuningrunmain.init_logging()
-  m = HalideTuner(args)
-  cfg = m.manipulator().random()
-  pprint(cfg)
-  print
-  schedule = m.cfg_to_schedule(cfg)
-  print schedule
-  print
-  print 'Schedule', m.run_schedule(schedule, 30)
-  print 'Baseline', m.run_baseline()
-
-
-def random_source(args):
-  """
-  Dump the source code of a random schedule
-  """
-  opentuner.tuningrunmain.init_logging()
-  m = HalideTuner(args)
-  cfg = m.manipulator().random()
-  schedule = m.cfg_to_schedule(cfg)
-  source = m.schedule_to_source(schedule)
-  print source
-
-
-def main(args):
-  if args.random_test:
-    random_test(args)
-  elif args.random_source:
-    random_source(args)
-  elif args.make_settings_file:
-    opentuner.tuningrunmain.init_logging()
-    HalideTuner(args).make_settings_file()
-  else:
-    HalideTuner.main(args)
-
-
-if __name__ == '__main__':
-  main(parser.parse_args())
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/timing_prefix.h b/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/timing_prefix.h
deleted file mode 100644
index d8bbc5f57b6177f3a88a28d57fef2d72bf8c3050..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/halide/timing_prefix.h
+++ /dev/null
@@ -1,100 +0,0 @@
-#include <Halide.h>
-#include <stdio.h>
-#include <sys/time.h>
-#include <unistd.h>
-
-#include <map>
-#include <string>
-
-// How many times to run (and take min)
-// #define AUTOTUNE_TRIALS 3
-
-// Limit in seconds to try running for (0 = no limit)
-// #define AUTOTUNE_LIMIT 0
-
-// Size to run with
-// #define AUTOTUNE_N 1024, 1024
-
-inline void _autotune_timing_stub(Halide::Func& func) {
-    func.compile_jit();
-
-    // TODO: this assumes scalar/non-Tuple outputs - should generalize to a Realization
-    std::vector<Halide::Type> out_types = func.output_types();
-    std::vector<buffer_t> out_raw_bufs;
-    std::vector<Halide::Buffer> out_bufs;
-
-    for (int i = 0; i < out_types.size(); i++) {
-        // Use the Buffer constructor as a helper to set up the buffer_t,
-        // but then throw away its allocation which we don't really want.
-        Halide::Buffer bufinit(out_types[i], AUTOTUNE_N);
-        out_raw_bufs.push_back(*bufinit.raw_buffer());
-        out_raw_bufs[i].host = NULL;
-        // TODO: free the host pointer?!
-        out_bufs.push_back(Halide::Buffer(out_types[i], &out_raw_bufs[i]));
-        assert(out_bufs[i].host_ptr() == NULL); // make sure we don't have an allocation
-    }
-    Halide::Realization output(out_bufs);
-    func.infer_input_bounds(output);
-    // assert(output[0].host_ptr()); // for now, the API doesn't seem to allocate outputs
-    
-    // TODO: this should go into Func::infer_input_bounds(Realization)
-    for (int i = 0; i < output.size(); i++) {
-        assert(!output[i].host_ptr()); // for now, the API doesn't seem to allocate outputs
-        buffer_t buf = *output[i].raw_buffer();
-        
-        // Figure out how much memory to allocate for this buffer
-        size_t min_idx = 0, max_idx = 0;
-        for (int d = 0; d < 4; d++) {
-            if (buf.stride[d] > 0) {
-                min_idx += buf.min[d] * buf.stride[d];
-                max_idx += (buf.min[d] + buf.extent[d] - 1) * buf.stride[d];
-            } else {
-                max_idx += buf.min[d] * buf.stride[d];
-                min_idx += (buf.min[d] + buf.extent[d] - 1) * buf.stride[d];
-            }
-        }
-        size_t total_size = (max_idx - min_idx);
-        while (total_size & 0x1f) total_size++;
-
-        // Allocate enough memory with the right dimensionality.
-        Halide::Buffer buffer(output[i].type(), total_size,
-                      buf.extent[1] > 0 ? 1 : 0,
-                      buf.extent[2] > 0 ? 1 : 0,
-                      buf.extent[3] > 0 ? 1 : 0);
-
-        // Rewrite the buffer fields to match the ones returned
-        for (int d = 0; d < 4; d++) {
-            buffer.raw_buffer()->min[d] = buf.min[d];
-            buffer.raw_buffer()->stride[d] = buf.stride[d];
-            buffer.raw_buffer()->extent[d] = buf.extent[d];
-        }
-        
-        output[i] = buffer;
-    }
-
-    timeval t1, t2;
-    double rv = 0;
-    const unsigned int timeout = AUTOTUNE_LIMIT;
-    alarm(timeout);
-    for (int i = 0; i < AUTOTUNE_TRIALS; i++) {
-      gettimeofday(&t1, NULL);
-      func.realize(output);
-      gettimeofday(&t2, NULL);
-      alarm(0); // disable alarm
-      double t = (t2.tv_sec - t1.tv_sec) + (t2.tv_usec - t1.tv_usec)/1000000.0;
-      if(i == 0 || t < rv)
-        rv = t;
-    }
-    printf("{\"time\": %.10f}\n", rv);
-    exit(0);
-}
-
-
-#ifndef AUTOTUNE_HOOK
-#define AUTOTUNE_HOOK(x)
-#endif
-
-#ifndef BASELINE_HOOK
-#define BASELINE_HOOK(x)
-#endif
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/HPL.dat.mako b/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/HPL.dat.mako
deleted file mode 100644
index 93354a2292a3bb3ddec1e2278e49b039b72d6bb1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/HPL.dat.mako
+++ /dev/null
@@ -1,31 +0,0 @@
-HPLinpack benchmark input file
-Innovative Computing Laboratory, University of Tennessee
-HPL.out      output file name (if any)
-0            device out (6=stdout,7=stderr,file)
-1            # of problems sizes (N)
-${size}        Ns
-1            # of NBs
-${blocksize}            NBs
-${row_or_colmajor_pmapping}            PMAP process mapping (0=Row-,1=Column-major)
-1            # of process grids (P x Q)
-2            Ps  PxQ must equal nprocs
-2           Qs
-16.0         threshold
-1            # of panel fact
-${pfact}            PFACTs (0=left, 1=Crout, 2=Right)
-1            # of recursive stopping criterium
-${nbmin}            NBMINs (>= 1)
-1            # of panels in recursion
-${ndiv}            NDIVs
-1            # of recursive panel fact.
-${rfact}            RFACTs (0=left, 1=Crout, 2=Right)
-1            # of broadcast
-${bcast}            BCASTs (0=1rg,1=1rM,2=2rg,3=2rM,4=Lng,5=LnM)
-1            # of lookahead depth
-${depth}            DEPTHs (>=0)
-${swap}            SWAP (0=bin-exch,1=long,2=mix)
-${swapping_threshold}           swapping threshold (default had 64)
-${L1_transposed}            L1 in (0=transposed,1=no-transposed) form
-${U_transposed}            U  in (0=transposed,1=no-transposed) form
-1            Equilibration (0=no,1=yes)
-${mem_alignment}            memory alignment in double (> 0) (4,8,16)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on Windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/hpl.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/hpl.py
deleted file mode 100644
index 4cbbe798249b61eae23b5142337a056ef58e83bd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/hpl/hpl.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import adddeps #fix sys.path
-
-import argparse
-import logging
-import subprocess
-
-import opentuner
-from opentuner.search.manipulator import (ConfigurationManipulator,
-                                          IntegerParameter,
-                                          FloatParameter)
-from opentuner.search.objective import MinimizeTime
-from opentuner.measurement import MeasurementInterface
-from opentuner.measurement.inputmanager import FixedInputManager
-from opentuner.tuningrunmain import TuningRunMain
-
-log = logging.getLogger(__name__)
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-
-parser.add_argument('--size', type=int, default=800,
-                    help='dimensions for the HPL matrix')
-parser.add_argument('--nprocs', type=int, default=4,
-                    help='number of MPI processes for each HPL run (minimum=4)')
-parser.add_argument('--xhpl', type=str, default="hpl-2.1/bin/OSX/xhpl",
-                    help='location of xhpl binary')
-
-class HPLinpack(MeasurementInterface):
-    def run(self, desired_result, input, limit):
-        self.output_hpl_datfile(desired_result.configuration.data)
-        binary = self.args.xhpl
-        subprocess.call(["mpirun", "-np", str(self.args.nprocs), binary])
-        
-        val = self.get_time_from_hpl_output()
-        
-        return opentuner.resultsdb.models.Result(time=val)
-        
-    def manipulator(self):
-        #FIXME: should some of these be expressed as booleans or switch parameters?
-        #FIXME: how to express P and Q, given PxQ=nprocs, with nprocs being fixed?
-        #FIXME: how to express logscaled parameter with a particular base?
-        manipulator = ConfigurationManipulator()
-        manipulator.add_parameter(IntegerParameter("blocksize", 1, 64))
-        manipulator.add_parameter(IntegerParameter("row_or_colmajor_pmapping", 0, 1))
-        manipulator.add_parameter(IntegerParameter("pfact", 0, 2))
-        manipulator.add_parameter(IntegerParameter("nbmin", 1, 4))
-        manipulator.add_parameter(IntegerParameter("ndiv", 2, 2))
-        manipulator.add_parameter(IntegerParameter("rfact", 0, 4))
-        manipulator.add_parameter(IntegerParameter("bcast", 0, 5))
-        manipulator.add_parameter(IntegerParameter("depth", 0, 4))
-        manipulator.add_parameter(IntegerParameter("swap", 0, 2))
-        manipulator.add_parameter(IntegerParameter("swapping_threshold", 64, 128))
-        manipulator.add_parameter(IntegerParameter("L1_transposed", 0, 1))
-        manipulator.add_parameter(IntegerParameter("U_transposed", 0, 1))
-        manipulator.add_parameter(IntegerParameter("mem_alignment", 4, 16))
-        
-        return manipulator
-        
-    def output_hpl_datfile(self, params):
-        """HPL uses an input file to express the parameters, and this uses mako to render it."""
-        params["size"] = self.args.size
-        from mako.template import Template
-        template = Template(filename="HPL.dat.mako")
-        with open("HPL.dat", "w") as f:
-            f.write(template.render(**params))
-            
-    def get_time_from_hpl_output(self, fname="HPL.out"):
-        """Returns the elapsed time only, from the HPL output file"""
-        #FIXME: clean up with REs
-        elapsed = 0.0
-        with open(fname) as f:
-            line = f.readline()
-            while (line[0:3] != "T/V"):
-                line = f.readline()
-            line = f.readline()
-            while (line[0:3] != "T/V"):
-                line = f.readline()
-            f.readline() # line of dashes
-            splitted = f.readline().split()
-            elapsed = float(splitted[5])
-        
-        return elapsed
-                    
-    
-    def program_name(self):
-        return "HPL"
-    
-    def program_version(self):
-      return "size=%d,nprocs=%d" % (self.args.size, self.args.nprocs)
-
-    def save_final_config(self, configuration):
-      '''
-      called at the end of autotuning with the best resultsdb.models.Configuration
-      '''
-      print "Final configuration", configuration.data
-            
-if __name__ == '__main__':
-  args = parser.parse_args()
-  HPLinpack.main(args)
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/README.md b/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/README.md
deleted file mode 100644
index f094e987f5e48d72aef426b39ef268e86f47e3c0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-This is an OpenTuner-based tuner that learns a series of button presses that complete the first level of Super Mario Bros. for the original Nintendo Entertainment System.
-
-## Dependencies
-
-- FCEUX, a NES emulator
-- `xvfb-run`, to run the emulator headless (optional, but speeds up tuning)
-- Super Mario Bros., assumed to be named `smb.nes`, which we can't help you get for legal reasons
-
-## Running
-
-Run the tuner with `./mario.py --technique=PSO_GA_Bandit`; it will launch FCEUX to run trials.  You can experiment with other techniques or `--parallelism` (the number of trials to run in parallel) too.
-
-You can implement your own configuration representation by subclassing Representation and passing `--representation=YourRepresentation`.  Your Representation class needs to provide a ConfigurationManipulator populated with parameters and a method to translate these parameters to button presses.  There are already a few representations implemented to use as examples.
-
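-A minimal sketch of a custom representation (the class name and button pattern here are illustrative, not part of the tuner):
-
-```python
-class HoldRightRepresentation(Representation):
-  """Always run right; tune only the frames on which A (jump) is held."""
-  def manipulator(self):
-    m = ConfigurationManipulator()
-    for i in xrange(0, 12000):
-      m.add_parameter(BooleanParameter('A{}'.format(i)))
-    return m
-
-  def interpret(self, cfg):
-    frames = set(xrange(0, 12000))
-    jumping = set(i for i in frames if cfg['A{}'.format(i)])
-    # returns (left, right, down, running, jumping) button-press sets
-    return set(), frames, set(), frames, jumping
-```
-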
-You can implement your own fitness function by subclassing FitnessFunction and passing `--fitness-function=YourFunction`.  Your function receives a win/loss boolean, the number of pixels moved to the right when the trial ended, and the number of frames that elapsed during the trial.  Lower fitness scores are better.  There are a few existing fitness functions; in particular, `ProgressTimesAverageSpeed` also tries to optimize speed.
-
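-A sketch of a custom fitness function (illustrative; it ranks all wins by speed, and all losses by progress, below every win):
-
-```python
-class FinishFast(FitnessFunction):
-  def __call__(self, won, x_pos, elapsed_frames):
-    # lower is better: wins score by elapsed frames, losses by distance
-    return float(elapsed_frames) if won else 1e9 - float(x_pos)
-```
-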
-If you want to watch the trials (or don't have `xvfb-run` available), pass `--headful`.
-
-## Playing the results
-
-When a tuning run completes, the best configuration (as judged by the fitness function) is written to `<hostname>-<tuningrun>.fm2`.  This file can be played back in FCEUX to watch the best configuration.
-
-You can also use the `--tuning-run=` option (passing the tuning run number from the best-configuration `.fm2` filename) to generate a new-bests `.fm2`, which concatenates, back-to-back, every trial that was the best configuration found so far during the tuning run.  You also need to pass `--database` pointing to the database containing that tuning run, and if you passed `--representation` or `--fitness-function` during the tuning run, you must pass the same values again.  So your final command might look like `./mario.py --tuning-run=42 --database=opentuner.db/hostname.db --representation=NaiveRepresentation --fitness-function=ProgressTimesAverageSpeed > new-bests-42.fm2`.
-
-## TODO
-
-- use the [fm2 format](http://www.fceux.com/web/help/fceux.html?fm2.html)'s subtitle support in new-bests movies to show run number and fitness score
-
-## Links
-
-- [Videos showing OpenTuner playing Super Mario Bros](https://www.youtube.com/playlist?list=PLngnz1zPEA08FWy8wF9JbGqjlm-elHmlb)
-- [Slides describing representation and results](http://groups.csail.mit.edu/commit/papers/2014/ansel-pact14-opentuner-slides.pdf) (see slide 16)
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on Windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/fceux-hook.lua b/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/fceux-hook.lua
deleted file mode 100644
index ce00288149936154f5dc64f94cdbcbcba04d4758..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/fceux-hook.lua
+++ /dev/null
@@ -1,25 +0,0 @@
-player_state_addr = 0x000E;
-player_state_dying = 6;
-player_float_addr = 0x001D;
-player_float_flagpole = 3;
-player_page_addr = 0x006D;
-player_horizpos_addr = 0x0086;
-minimum_frames = 197;
-
-emu.speedmode("maximum");
-while true do
-	if (emu.framecount() > minimum_frames) then
-		--dead?
-		local dead = memory.readbyte(player_state_addr) == player_state_dying;
-		--flagpole?
-		local won = memory.readbyte(player_float_addr) == player_float_flagpole;
-		if (dead or won) then
-			local str = (dead and "died" or "won");
-			local x_pos = math.floor(memory.readbyteunsigned(player_page_addr)*256 + memory.readbyteunsigned(player_horizpos_addr));
-			local framecount = emu.framecount();
-			io.write(str, " ", x_pos, " ", framecount, "\n");
-			os.exit(0);
-		end;
-	end;
-	emu.frameadvance();
-end
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/mario.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/mario.py
deleted file mode 100755
index d388321e1f3b4f70a5d1262f43b89702ea924c79..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/mario/mario.py
+++ /dev/null
@@ -1,341 +0,0 @@
-#!/usr/bin/env python2
-
-"""OpenTuner plays Super Mario Bros. for NES
-
-We write a movie file and ask the emulator to play it back while running
-fceux-hook.lua, which checks for death/flagpole and prints the fitness to
-stdout where OpenTuner, as the parent process, can read it.
-"""
-
-import adddeps #fix sys.path
-import argparse
-import base64
-import pickle
-import tempfile
-import subprocess
-import re
-import zlib
-import abc
-import sys
-import os
-import traceback
-import collections
-import socket
-
-import opentuner
-from opentuner.search.manipulator import ConfigurationManipulator, IntegerParameter, EnumParameter, BooleanParameter
-from opentuner.measurement import MeasurementInterface
-from opentuner.measurement.inputmanager import FixedInputManager
-from opentuner.tuningrunmain import TuningRunMain
-from opentuner.search.objective import MinimizeTime
-
-def instantiate(class_name):
-  return getattr(sys.modules[__name__], class_name)()
-
-argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-argparser.add_argument('--tuning-run', type=int, help='concatenate new bests from given tuning run into single movie')
-argparser.add_argument('--headful', action='store_true', help='run headful (not headless) for debugging or live demo')
-argparser.add_argument('--xvfb-delay', type=int, default=0, help='delay between launching xvfb and fceux')
-argparser.add_argument('--representation', default='DurationRepresentation', type=instantiate, help='name of representation class')
-argparser.add_argument('--fitness-function', default='Progress', type=instantiate, help='name of fitness function class')
-
-def call_or_die(command, failmsg=None):
-  try:
-    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    stdout, stderr = p.communicate()
-    return stdout, stderr, p.returncode
-  except:
-    print "Failed to execute", command
-    traceback.print_exc()
-    print "Child traceback:"
-    print sys.exc_info()[1].child_traceback
-    if failmsg:
-      print failmsg
-    sys.exit(1)
-
-# Functions for building FCEUX movie files (.fm2 files)
-
-def fm2_line(up, down, left, right, a, b, start, select, reset=False):
-  """formats one frame of input with the given button presses"""
-  return ''.join(('|1|' if reset else '|0|') +
-    ('R' if right else '.') +
-    ('L' if left else '.') +
-    ('D' if down else '.') +
-    ('U' if up else '.') +
-    ('T' if start else '.') +
-    ('S' if select else '.') +  # fm2 marks select with 'S'
-    ('B' if b else '.') +
-    ('A' if a else '.') +
-    '|........||')
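-# e.g. fm2_line(False, False, False, True, True, False, False, False)
-# yields '|0|R......A|........||' (one frame holding right + A)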
-
-def maxd(iterable, default):
-  try:
-    return max(iterable)
-  except ValueError:
-    return default
-
-def fm2_lines(up, down, left, right, a, b, start, select, reset=set(), minFrame=None, maxFrame=None):
-  """formats many frames using the given button-press sets"""
-  if minFrame is None:
-    minFrame = 0
-  if maxFrame is None:
-    maxFrame = max(maxd(up, 0), maxd(down, 0), maxd(left, 0), maxd(right, 0), maxd(a, 0), maxd(b, 0), maxd(start, 0), maxd(select, 0), maxd(reset, 0)) + 1
-  lines = list()
-  for i in xrange(minFrame, maxFrame):
-    lines.append(fm2_line(i in up, i in down, i in left, i in right, i in a, i in b, i in start, i in select, i in reset))
-  return lines
-
-def fm2_smb_header():
-  return ["version 3",
-    "emuVersion 9828",
-    "romFilename smb.nes",
-    "romChecksum base64:jjYwGG411HcjG/j9UOVM3Q==",
-    "guid 51473540-E9D7-11E3-ADFC-46CE3219C4E0",
-    "fourscore 0",
-    "port0 1",
-    "port1 1",
-    "port2 0"]
-
-def fm2_smb(left, right, down, b, a, header=True, padding=True, minFrame=None, maxFrame=None):
-  reset = set()
-  start = set()
-  if padding:
-    left = set([x+196 for x in left])
-    right = set([x+196 for x in right])
-    down = set([x+196 for x in down])
-    b = set([x+196 for x in b])
-    a = set([x+196 for x in a])
-    reset.add(0)
-    start.add(33)
-  lines = fm2_lines(set(), down, left, right, a, b, start, set(), reset, minFrame, maxFrame)
-  if header:
-    return "\n".join(fm2_smb_header() + lines)
-  else:
-    return "\n".join(lines)
-
-display_numbers = collections.deque()
-
-def run_movie(fm2, args):
-  with tempfile.NamedTemporaryFile(suffix=".fm2", delete=True) as f:
-    f.write(fm2)
-    f.flush()
-    cmd = []
-    if not args.headful:
-      display = display_numbers.pop()
-      cmd += ["xvfb-run", "-n", display, "-w", str(args.xvfb_delay), "-e", "/dev/stderr"]
-    cmd += ["fceux", "--playmov", f.name, "--loadlua",
-        "fceux-hook.lua", "--nogui", "--volume", "0", "--no-config", "1",
-        "smb.nes"]
-    stdout, stderr, returncode = call_or_die(cmd)
-    if not args.headful:
-      display_numbers.append(display)
-  match = re.search(r"^(won|died) (\d+) (\d+)$", stdout, re.MULTILINE)
-  if not match:
-    print stderr
-    print stdout
-    raise ValueError
-  wl = match.group(1)
-  x_pos = int(match.group(2))
-  framecount = int(match.group(3))
-  return (wl, x_pos, framecount)
-
-class Representation(object):
-  """Interface for pluggable tuning representations."""
-  __metaclass__ = abc.ABCMeta
-
-  @abc.abstractmethod
-  def manipulator(self):
-    """Return a ConfigurationManipulator for this representation."""
-    pass
-
-  @abc.abstractmethod
-  def interpret(self, cfg):
-    """Unpack this representation into button-press sets (L, R, D, B, A)."""
-    pass
-
-class NaiveRepresentation(Representation):
-  """Uses a parameter per (button, frame) pair."""
-  def manipulator(self):
-    m = ConfigurationManipulator()
-    for i in xrange(0, 12000):
-      m.add_parameter(BooleanParameter('L{}'.format(i)))
-      m.add_parameter(BooleanParameter('R{}'.format(i)))
-      m.add_parameter(BooleanParameter('D{}'.format(i)))
-      m.add_parameter(BooleanParameter('B{}'.format(i)))
-      m.add_parameter(BooleanParameter('A{}'.format(i)))
-    return m
-
-  def interpret(self, cfg):
-    left = set()
-    right = set()
-    down = set()
-    running = set()
-    jumping = set()
-    for i in xrange(0, 12000):
-      if cfg['L{}'.format(i)]:
-        left.add(i)
-      if cfg['R{}'.format(i)]:
-        right.add(i)
-      if cfg['D{}'.format(i)]:
-        down.add(i)
-      if cfg['B{}'.format(i)]:
-        running.add(i)
-      if cfg['A{}'.format(i)]:
-        jumping.add(i)
-    return left, right, down, running, jumping
-
-class DurationRepresentation(Representation):
-  def manipulator(self):
-    m = ConfigurationManipulator()
-    for i in xrange(0, 1000):
-      #bias 3:1 in favor of moving right
-      m.add_parameter(EnumParameter('move{}'.format(i), ["R", "L", "RB", "LB", "N", "LR", "LRB", "R2", "RB2", "R3", "RB3"]))
-      m.add_parameter(IntegerParameter('move_duration{}'.format(i), 1, 60))
-      #m.add_parameter(BooleanParameter("D"+str(i)))
-    for i in xrange(0, 1000):
-      m.add_parameter(IntegerParameter('jump_frame{}'.format(i), 0, 24000))
-      m.add_parameter(IntegerParameter('jump_duration{}'.format(i), 1, 32))
-    return m
-
-  def interpret(self, cfg):
-    left = set()
-    right = set()
-    down = set()
-    running = set()
-    start = 0
-    for i in xrange(0, 1000):
-      move = cfg['move{}'.format(i)]
-      move_duration = cfg['move_duration{}'.format(i)]
-      if "R" in move:
-        right.update(xrange(start, start + move_duration))
-      if "L" in move:
-        left.update(xrange(start, start + move_duration))
-      if "B" in move:
-        running.update(xrange(start, start + move_duration))
-      start += move_duration
-    jumping = set()
-    for i in xrange(0, 1000):
-      jump_frame = cfg['jump_frame{}'.format(i)]
-      jump_duration = cfg['jump_duration{}'.format(i)]
-      jumping.update(xrange(jump_frame, jump_frame + jump_duration))
-    return left, right, down, running, jumping
-
-class AlphabetRepresentation(Representation):
-  def manipulator(self):
-    m = ConfigurationManipulator()
-    for i in xrange(0, 400*60):
-      m.add_parameter(EnumParameter('{}'.format(i), xrange(0, 16)))
-    return m
-
-  def interpret(self, cfg):
-    left = set()
-    right = set()
-    down = set()
-    running = set()
-    jumping = set()
-    for i in xrange(0, 400*60):
-      bits = cfg[str(i)]
-      if bits & 1:
-        left.add(i)
-      if bits & 2:
-        right.add(i)
-      if bits & 4:
-        running.add(i)
-      if bits & 8:
-        jumping.add(i)
-      #if bits & 16:
-      #  down.add(i)
-    return left, right, down, running, jumping
-
-class FitnessFunction(object):
-  """Interface for pluggable fitness functions."""
-  __metaclass__ = abc.ABCMeta
-
-  @abc.abstractmethod
-  def __call__(self, won, x_pos, elapsed_frames):
-    """Return the fitness (float, lower is better)."""
-    pass
-
-class Progress(FitnessFunction):
-  def __call__(self, won, x_pos, elapsed_frames):
-    return -float(x_pos)
-
-class ProgressPlusTimeRemaining(FitnessFunction):
-  def __call__(self, won, x_pos, elapsed_frames):
-    """x_pos plus 1 for each frame remaining on the timer on a win.  This results in a large discontinuity at wins.  This was the fitness function used for the OpenTuner paper, though the paper only discussed time-to-first-win."""
-    return -float(x_pos + 400*60 - elapsed_frames) if won else -float(x_pos)
-
-class ProgressTimesAverageSpeed(FitnessFunction):
-  def __call__(self, won, x_pos, elapsed_frames):
-    return -x_pos * (float(x_pos)/elapsed_frames)
-
-class SMBMI(MeasurementInterface):
-  def __init__(self, args):
-    super(SMBMI, self).__init__(args)
-    self.parallel_compile = True
-    self.args = args
-
-  def manipulator(self):
-    return self.args.representation.manipulator()
-
-  def compile(self, cfg, id):
-    left, right, down, running, jumping = self.args.representation.interpret(cfg)
-    fm2 = fm2_smb(left, right, down, running, jumping)
-    try:
-      wl, x_pos, framecount = run_movie(fm2, self.args)
-    except ValueError:
-      return opentuner.resultsdb.models.Result(state='ERROR', time=float('inf'))
-    print wl, x_pos, framecount
-    return opentuner.resultsdb.models.Result(state='OK', time=self.args.fitness_function("won" in wl, x_pos, framecount))
-
-  def run_precompiled(self, desired_result, input, limit, compile_result, id):
-    return compile_result
-
-  def run(self, desired_result, input, limit):
-    pass
-
-  def save_final_config(self, cfg):
-    left, right, down, running, jumping = self.args.representation.interpret(cfg.data)
-    fm2 = fm2_smb(left, right, down, running, jumping)
-    _, _, framecount = run_movie(fm2, self.args)
-    filename = '{}-{}.fm2'.format(socket.gethostname(), self.driver.tuning_run.id)
-    with open(filename, 'w') as f:
-      f.write(fm2_smb(left, right, down, running, jumping, maxFrame=framecount))
-
-def new_bests_movie(args):
-  stdout, stderr, returncode = call_or_die(["sqlite3", args.database, "select configuration_id from result where tuning_run_id = %d and was_new_best = 1 order by collection_date;" % args.tuning_run])
-  if returncode:
-    print "Error retrieving new-best configurations:", stderr
-    sys.exit(1)
-  cids = stdout.split()
-  print '\n'.join(fm2_smb_header())
-  for cid in cids:
-    stdout, stderr, returncode = call_or_die(["sqlite3", args.database, "select quote(data) from configuration where id = %d;" % int(cid)])
-    if returncode:
-      print "Error retriving configuration data:", cid, stderr
-      sys.exit(1)
-    cfg = pickle.loads(zlib.decompress(base64.b16decode(stdout.strip()[2:-1])))
-    left, right, down, running, jumping = args.representation.interpret(cfg)
-    fm2 = fm2_smb(left, right, down, running, jumping)
-    _, _, framecount = run_movie(fm2, args)
-    print fm2_smb(left, right, down, running, jumping, header=False, maxFrame=framecount)
-
-if __name__ == '__main__':
-  args = argparser.parse_args()
-  call_or_die(["fceux", "--help"], failmsg="Is fceux on your PATH?")
-  if not args.headful:
-    call_or_die(["xvfb-run", "--help"], failmsg="Is xvfb-run on your PATH? (or, pass --headful)")
-    for n in xrange(99, 99 + args.parallelism):
-      display_numbers.append(str(n))
-  if args.tuning_run:
-    call_or_die(["sqlite3", "-version"], failmsg="Is sqlite3 on your PATH?")
-    if args.database is not None:
-      new_bests_movie(args)
-    else:
-      print "must specify --database"
-  else:
-    if os.path.isfile('smb.nes'):
-      SMBMI.main(args)
-    else:
-      print "smb.nes not found"
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/.gitignore
deleted file mode 100644
index a6e67132d61e0bd837b953376dc866031d5f742a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-linux_x86_64
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/README.md b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/README.md
deleted file mode 100644
index e4b446468658463cab275862143c95add38d0eb5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-Source for PetaBricks binaries can be found at:
-  - https://github.com/petabricks/petabricks/
-  - https://code.google.com/p/petabricks/
-
-
-Basic usage for running the raw programs is:
-```
-./Prog --config=CONFIG -n=N --time --accuracy --max-sec=TIMEOUT --trials=1
-
---config=<STRING>
-    filename of the program configuration (see example in .cfg.default file)
---n=<INTEGER>
-    generate a random input of the given size and run it
---time
-    print timing results in xml format
---accuracy
-    print out accuracy of answer
---max-sec=<NUMBER> (default: 1.79769e+308)
-    terminate measurement if it exceeds the given number of seconds
-
-many more options are given by running ./Prog --help
-```
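-
-A concrete invocation against one of the downloaded benchmarks might look
-like this (the binary name and values here are illustrative):
-```
-./linux_x86_64/Sort --config=Sort.cfg.default -n=100000 --time --accuracy --max-sec=30 --trials=1
-```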
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/deps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/deps.py
deleted file mode 100644
index c03a106a85827c1c4faed505b78d4f18e168c7e9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/deps.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import os
-import sys
-
-project_root = os.path.normpath(os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), '../..'))
-sys.path.insert(0, project_root)
-
-
-try:
-  from lxml import etree
-except ImportError:
-  try:
-    # Python 2.5
-    import xml.etree.cElementTree as etree
-  except ImportError:
-    import xml.etree.ElementTree as etree
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/download_benchmarks.sh b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/download_benchmarks.sh
deleted file mode 100755
index aaf333b455a0414575b338625e45b58db8188c5b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/download_benchmarks.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-if test -e linux_x86_64
-then
-  echo "benchmarks already downloaded"
-else
-  wget -O- http://people.csail.mit.edu/jansel/petabricks_benchmarks_linux_x86_64.tar.bz2 | tar jxv
-fi
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/import_old_result.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/import_old_result.py
deleted file mode 100755
index 9add4ed035c50e2f7f21dc79f6af89571137b257..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/import_old_result.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-
-import adddeps  # fix sys.path
-
-import argparse
-import json
-import logging
-import os
-import re
-import sys
-import uuid
-import subprocess
-
-try:
-  from lxml import etree
-except ImportError:
-  import xml.etree.ElementTree as etree
-
-import opentuner
-from opentuner import resultsdb
-from datetime import datetime
-from datetime import timedelta
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser()
-argparser.add_argument('--database', default='opentuner.db/import.db')
-argparser.add_argument('--limit', type=float, default=10)
-argparser.add_argument('program')
-argparser.add_argument('candidatelog')
-
-
-def run(args, cfg):
-  limit = args.limit
-  cmd = [args.program,
-         '--time',
-         '--accuracy',
-         '--config=' + cfg,
-         '--max-sec=%.10f' % args.limit,
-         '-n=%d' % args.n]
-  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-  out, err = p.communicate()
-
-  result = opentuner.resultsdb.models.Result()
-  try:
-    root = etree.XML(out)
-    result.time = float(root.find('stats/timing').get('average'))
-    result.accuracy = float(root.find('stats/accuracy').get('average'))
-    if result.time < limit + 3600:
-      result.state = 'OK'
-    else:
-      # time will be 2**31 if timeout
-      result.state = 'TIMEOUT'
-  except Exception:
-    log.exception('run error')
-    log.warning('program crash, out = %s / err = %s', out, err)
-    result.state = 'ERROR'
-    result.time = float('inf')
-    result.accuracy = float('-inf')
-  return result
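-  # The XML parsed above is assumed to be shaped roughly like
-  #   <root><stats><timing average="0.123"/><accuracy average="0.99"/></stats></root>
-  # (element names follow the find() calls; the root tag and attribute
-  # values are illustrative).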
-
-
-def main(args):
-  if '://' not in args.database:
-    args.database = 'sqlite:///' + args.database
-  engine, Session = opentuner.resultsdb.connect(args.database)
-  session = Session()
-
-  program_settings = json.load(open(args.program + '.settings'))
-  args.n = program_settings['n']
-  args.technique = ['Imported']
-  objective = ThresholdAccuracyMinimizeTime(program_settings['accuracy'])
-
-  tuningrun = resultsdb.models.TuningRun(
-    uuid=uuid.uuid4().hex,
-    name='import',
-    args=args,
-    start_date=datetime.now(),
-    objective=objective,
-    program_version=resultsdb.models.ProgramVersion.get(
-      session, 'PetaBricksInterface', args.program, 'imported'),
-    state='COMPLETE',
-  )
-  session.add(tuningrun)
-
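-  # Each non-comment candidatelog line is assumed to be tab-separated, with
-  # the elapsed seconds in field 0 and a config path (relative to the log's
-  # parent directory) in field 5.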
-  for gen, line in enumerate(open(args.candidatelog)):
-    if line[0] != '#':
-      line = re.split('\t', line)
-      date = tuningrun.start_date + timedelta(seconds=float(line[0]))
-      cfg = os.path.normpath(
-        os.path.join(os.path.dirname(args.candidatelog), '..', line[5]))
-      result = run(args, cfg)
-      result.was_new_best = True
-      result.tuning_run = tuningrun
-      result.collection_date = date
-      session.add(result)
-      desired_result = resultsdb.models.DesiredResult(
-        limit=args.limit,
-        tuning_run=tuningrun,
-        generation=gen,
-        requestor='Imported',
-        request_date=date,
-        start_date=date,
-        result=result,
-        state='COMPLETE')
-      session.add(desired_result)
-      tuningrun.end_date = date
-      print gen, date, result.time
-
-  session.commit()
-
-
-if __name__ == '__main__':
-  opentuner.tuningrunmain.init_logging()
-  sys.exit(main(argparser.parse_args()))
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/pbtuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/pbtuner.py
deleted file mode 100755
index 7163294494eec42b73a232b6ee00fb7164d2c691..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/pbtuner.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python
-
-import adddeps  # fix sys.path
-
-import re
-import argparse
-import logging
-import subprocess
-import tempfile
-import json
-from pprint import pprint
-
-import opentuner
-from opentuner.search.manipulator import (ConfigurationManipulator,
-                                          IntegerParameter,
-                                          LogIntegerParameter,
-                                          FloatParameter,
-                                          LogFloatParameter,
-                                          SelectorParameter,
-                                          SwitchParameter,
-                                          PermutationParameter, )
-
-try:
-  from lxml import etree
-except ImportError:
-  import xml.etree.ElementTree as etree
-
-from opentuner.measurement import MeasurementInterface
-from opentuner.measurement.inputmanager import FixedInputManager
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-
-log = logging.getLogger("pbtuner")
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-parser.add_argument('program',
-                    help='PetaBricks binary program to autotune')
-parser.add_argument('--program-cfg-default',
-                    help="override default program config exemplar location")
-parser.add_argument('--program-cfg-output',
-                    help="location final autotuned configuration is written")
-parser.add_argument('--program-settings',
-                    help="override default program settings file location")
-parser.add_argument('--program-input',
-                    help="use only a given input for autotuning")
-parser.add_argument('--upper-limit', type=float, default=30,
-                    help="time limit to apply to initial test")
-parser.add_argument('--test-config', action='store_true')
-
-
-class PetaBricksInterface(MeasurementInterface):
-  def __init__(self, args):
-    self.program_settings = json.load(open(args.program_settings))
-    input_manager = FixedInputManager(size=self.program_settings['n'])
-    objective = ThresholdAccuracyMinimizeTime(self.program_settings['accuracy'])
-
-    # pass many settings to parent constructor
-    super(PetaBricksInterface, self).__init__(
-        args, program_name=args.program,
-        program_version=self.file_hash(args.program),
-        input_manager=input_manager, objective=objective)
-
-  def build_config(self, cfg):
-    r = dict()
-
-    # direct copy
-    for k, v in cfg.iteritems():
-      if k[0] != '.':
-        r[k] = v
-
-    for name, choices in self.choice_sites.items():
-      param = self.manipulator.parameters_dict(cfg)['.' + name]
-      lvl = 0
-      for cutoff, choice in param.selector_iter(cfg):
-        lvl += 1
-        r['%s_lvl%d_rule' % (name, lvl)] = choice
-        if lvl > 1:
-          r['%s_lvl%d_cutoff' % (name, lvl)] = cutoff
-
-    return r
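-    # Illustrative expansion: a choice site "sort" with two selector levels
-    # might yield {'sort_lvl1_rule': 2, 'sort_lvl2_rule': 0,
-    # 'sort_lvl2_cutoff': 600} (names and values made up for the example).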
-
-  def run(self, desired_result, input, limit):
-    limit = min(limit, self.args.upper_limit)
-    with tempfile.NamedTemporaryFile(suffix='.petabricks.cfg') as cfgtmp:
-      for k, v in self.build_config(desired_result.configuration.data).items():
-        print >> cfgtmp, k, '=', v
-      cfgtmp.flush()
-      if args.program_input:
-        input_opts = ['--iogen-run=' + args.program_input,
-                      '--iogen-n=%d' % input.input_class.size]
-      else:
-        input_opts = ['-n=%d' % input.input_class.size]
-
-      cmd = [args.program,
-             '--time',
-             '--accuracy',
-             '--max-sec=%.8f' % limit,
-             '--config=' + cfgtmp.name] + input_opts
-      log.debug("cmd: %s", ' '.join(cmd))
-      p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-      out, err = p.communicate()
-
-    result = opentuner.resultsdb.models.Result()
-    try:
-      root = etree.XML(out)
-      result.time = float(root.find('stats/timing').get('average'))
-      result.accuracy = float(root.find('stats/accuracy').get('average'))
-      if result.time < limit + 3600:
-        result.state = 'OK'
-      else:
-        # time will be 2**31 if timeout
-        result.state = 'TIMEOUT'
-    except Exception:
-      log.warning("program crash, out = %s / err = %s", out, err)
-      result.state = 'ERROR'
-      result.time = float('inf')
-      result.accuracy = float('-inf')
-    return result
-
-  def save_final_config(self, configuration):
-    """
-    called at the end of autotuning with the best
-    resultsdb.models.Configuration
-    """
-    with open(args.program_cfg_output, 'w') as fd:
-      cfg = self.build_config(configuration.data)
-      for k, v in sorted(cfg.items()):
-        print >> fd, k, '=', v
-    log.info("final configuration written to %s", args.program_cfg_output)
-
-  def manipulator(self):
-    """create the configuration manipulator, from example config"""
-    upper_limit = self.program_settings['n'] + 1
-    cfg = open(self.args.program_cfg_default).read()
-    manipulator = ConfigurationManipulator()
-
-    self.choice_sites = dict()
-
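-    # The regex below assumes exemplar lines shaped roughly like
-    #   SomeTransform_lvl1_rule = 0  # int 0 to 7
-    # (a name, its current value, then a comment giving the value type and
-    # range); the transform name here is illustrative.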
-    for m in re.finditer(r" *([a-zA-Z0-9_-]+)[ =]+([0-9e.+-]+) *"
-                         r"[#] *([a-z]+).* ([0-9]+) to ([0-9]+)", cfg):
-      k, v, valtype, minval, maxval = m.group(1, 2, 3, 4, 5)
-      minval = float(minval)
-      maxval = float(maxval)
-      if upper_limit:
-        maxval = min(maxval, upper_limit)
-      assert valtype == 'int'
-      #log.debug("param %s %f %f", k, minval, maxval)
-
-      m1 = re.match(r'(.*)_lvl[0-9]+_rule', k)
-      m2 = re.match(r'(.*)_lvl[0-9]+_cutoff', k)
-      if m1:
-        self.choice_sites[m1.group(1)] = int(maxval)
-      elif m2:
-        pass
-      elif k == 'worker_threads':
-        manipulator.add_parameter(IntegerParameter(k, 1, 16))
-      elif k == 'distributedcutoff':
-        pass
-      elif minval == 0 and maxval < 64:
-        manipulator.add_parameter(SwitchParameter(k, maxval))
-      else:
-        manipulator.add_parameter(LogIntegerParameter(k, minval, maxval))
-
-    for name, choices in self.choice_sites.items():
-      manipulator.add_parameter(
-        SelectorParameter('.' + name, range(choices + 1),
-                          upper_limit / choices))
-
-    self.manipulator = manipulator
-    return manipulator
-
-  def test_config(self):
-    pprint(self.manipulator().random())
-
-
-if __name__ == '__main__':
-  args = parser.parse_args()
-  if not args.program_cfg_default:
-    args.program_cfg_default = args.program + '.cfg.default'
-  if not args.program_cfg_output:
-    args.program_cfg_output = args.program + '.cfg'
-  if not args.program_settings:
-    args.program_settings = args.program + '.settings'
-  if args.test_config:
-    PetaBricksInterface(args).test_config()
-  else:
-    PetaBricksInterface.main(args)
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/testwrapper.sh b/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/testwrapper.sh
deleted file mode 100755
index 2b6a94e57a6b4205638dd0560da79482494ac20b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/petabricks/testwrapper.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-COUNT=50
-for Z in `seq $COUNT`
-do
-  for T in `./pbtuner.py --list-techniques "$@"`;
-  do
-    echo $Z/$COUNT $T
-    ./pbtuner.py --technique="$T" "$@"
-  done
-done
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/api_example.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/api_example.py
deleted file mode 100755
index e87a8fffe1544714247b4435a3b5ed7d3f92eb03..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/api_example.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/python
-"""
-Example usage of the Python API interface to opentuner.
-
-Unlike the other examples, this code lets the user control the main() of
-the program and calls into opentuner to get new configurations to test.
-"""
-
-import adddeps  # add opentuner to path in dev mode
-
-import opentuner
-from opentuner.api import TuningRunManager
-from opentuner.measurement.interface import DefaultMeasurementInterface
-from opentuner.resultsdb.models import Result
-from opentuner.search.manipulator import ConfigurationManipulator
-from opentuner.search.manipulator import IntegerParameter
-import logging
-import argparse
-
-log = logging.getLogger(__name__)
-
-
-def test_func(cfg):
-  x = cfg['x']
-  y = (x - 10) * (x - 10)
-  log.debug("f({}) -> {}".format(x, y))
-  return y
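-  # y reaches its minimum of 0 at x == 10, so the tuner should report a
-  # best x at or near 10.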
-
-
-def main():
-    parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-    args = parser.parse_args()
-    manipulator = ConfigurationManipulator()
-    manipulator.add_parameter(IntegerParameter('x', -200, 200))
-    interface = DefaultMeasurementInterface(args=args,
-                                            manipulator=manipulator,
-                                            project_name='examples',
-                                            program_name='api_test',
-                                            program_version='0.1')
-    api = TuningRunManager(interface, args)
-    for x in xrange(500):
-        desired_result = api.get_next_desired_result()
-        if desired_result is None:
-          # The search space for this example is very small, so sometimes
-          # the techniques have trouble finding a config that hasn't already
-          # been tested.  Change this to a continue to make it try again.
-          break
-        cfg = desired_result.configuration.data
-        result = Result(time=test_func(cfg))
-        api.report_result(desired_result, result)
-
-    best_cfg = api.get_best_configuration()
-    api.finish()
-    print 'best x found was', best_cfg['x']
-
-if __name__ == '__main__':
-  main()
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/multiple_tuning_runs.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/multiple_tuning_runs.py
deleted file mode 100755
index 5e0918e3afe49ce7a819f36312d770cdb73a5003..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/py_api/multiple_tuning_runs.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/python
-"""
-Example usage of the Python API interface to opentuner.
-
-Unlike the other examples, this code lets the user control the main() of
-the program and calls into opentuner to get new configurations to test.
-
-This version runs multiple tuning runs at once in a single process.
-"""
-
-import adddeps  # add opentuner to path in dev mode
-
-import opentuner
-from opentuner.api import TuningRunManager
-from opentuner.measurement.interface import DefaultMeasurementInterface
-from opentuner.resultsdb.models import Result
-from opentuner.search.manipulator import ConfigurationManipulator
-from opentuner.search.manipulator import IntegerParameter
-import logging
-import argparse
-
-log = logging.getLogger(__name__)
-
-
-def test_func1(cfg):
-  x = cfg['x']
-  y = (x - 10) * (x - 10)
-  log.debug("f({}) -> {}".format(x, y))
-  return y
-
-
-def test_func2(cfg):
-  x = cfg['x']
-  y = (x + 10) * (x + 10)
-  log.debug("f({}) -> {}".format(x, y))
-  return y
-
-
-def test_func3(cfg):
-  x = cfg['x']
-  y = (x + 20) * (x + 20)
-  log.debug("f({}) -> {}".format(x, y))
-  return y
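-# The three test functions are minimized at x == 10, -10, and -20
-# respectively, so the three tuning runs should converge toward those values.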
-
-
-def create_test_tuning_run(db):
-  parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  args = parser.parse_args()
-  args.database = db
-  manipulator = ConfigurationManipulator()
-  manipulator.add_parameter(IntegerParameter('x', -200, 200))
-  interface = DefaultMeasurementInterface(args=args,
-                                          manipulator=manipulator,
-                                          project_name='examples',
-                                          program_name='api_test',
-                                          program_version='0.1')
-  api = TuningRunManager(interface, args)
-  return api
-
-
-def main():
-    apis = [create_test_tuning_run('sqlite:////tmp/a.db'),
-            create_test_tuning_run('sqlite:////tmp/b.db'),
-            create_test_tuning_run('sqlite:////tmp/c.db')]
-    test_funcs = [test_func1, test_func2, test_func3]
-    for x in xrange(100):
-      for api, test_func in zip(apis, test_funcs):
-        desired_result = api.get_next_desired_result()
-        if desired_result is None:
-          continue
-        cfg = desired_result.configuration.data
-        result = Result(time=test_func(cfg))
-        api.report_result(desired_result, result)
-
-    best_cfgs = [api.get_best_configuration() for api in apis]
-    for api in apis:
-      api.finish()
-
-    print('best x configs: {}'.format(best_cfgs))
-
-if __name__ == '__main__':
-  main()
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/.gitignore b/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/.gitignore
deleted file mode 100644
index aa0571caf15bdf4665fee72a1d87051d12718127..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-rosenbrock.db
-*.db
-opentuner.log
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.makefile b/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.makefile
deleted file mode 100755
index 7b9be87a9a8ba698083ad4ac2c228ed3f11ed8df..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.makefile
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/make -f
-# use -j4 to run in parallel
-
-FN         := rosenbrock
-DIMS       := 4
-TECHNIQUES := $(shell ./rosenbrock.py --list-techniques)
-define test_loop
-DB="sqlite:///opentuner.db/$$RUN.db";     \
-for TEQ in $(TECHNIQUES); do          \
-	./rosenbrock.py --function=$(FN)    \
-									--technique=$$TEQ  \
-									--dimensions=$(DIMS)   \
-									--database=$$DB;       \
-done;
-endef
-
-default: run.1 run.2 run.3 run.4 run.5 run.6 run.7 run.8 run.9 run.10 run.11 \
-run.12 run.13 run.14 run.15 run.16 run.17 run.18 run.19 run.20 run.21 run.22 \
-run.23 run.24 run.25 run.26 run.27 run.28 run.29 run.30
-
-run.%:
-	RUN=$* $(test_loop)
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.py
deleted file mode 100755
index da426f239bdcdd945eca8630db0c96a8a60544d6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/rosenbrock/rosenbrock.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-#
-# This is a simple testcase purely for testing the autotuner
-#
-# http://en.wikipedia.org/wiki/Rosenbrock_function
-#
-# Also supports some other test functions taken from:
-# http://en.wikipedia.org/wiki/Test_functions_for_optimization
-#
-
-import adddeps  # fix sys.path
-
-import argparse
-import logging
-
-import opentuner
-from opentuner.measurement import MeasurementInterface
-from opentuner.search.manipulator import ConfigurationManipulator
-from opentuner.search.manipulator import FloatParameter
-
-log = logging.getLogger(__name__)
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-parser.add_argument('--dimensions', type=int, default=2,
-                    help='dimensions for the Rosenbrock function')
-parser.add_argument('--domain', type=float, default=1000,
-                    help='bound for variables in each dimension')
-parser.add_argument('--function', default='rosenbrock',
-                    choices=('rosenbrock', 'sphere', 'beale'),
-                    help='function to use')
-
-
-class Rosenbrock(MeasurementInterface):
-  def run(self, desired_result, input, limit):
-    cfg = desired_result.configuration.data
-    val = 0.0
-    if self.args.function == 'rosenbrock':
-      # the actual rosenbrock function:
-      for d in xrange(self.args.dimensions - 1):
-        x0 = cfg[d]
-        x1 = cfg[d + 1]
-        val += 100.0 * (x1 - x0 ** 2) ** 2 + (x0 - 1) ** 2
-    elif self.args.function == 'sphere':
-      for d in xrange(self.args.dimensions):
-        xi = cfg[d]
-        val += xi ** 2
-    elif self.args.function == 'beale':
-      assert self.args.dimensions == 2
-      assert self.args.domain == 4.5
-      x = cfg[0]
-      y = cfg[1]
-      val = ((1.5 - x + x * y) ** 2 +
-             (2.25 - x + x * y ** 2) ** 2 +
-             (2.625 - x + x * y ** 3) ** 2)
-    return opentuner.resultsdb.models.Result(time=val)
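-    # Known minima for reference: rosenbrock reaches 0 at all cfg[d] == 1,
-    # sphere reaches 0 at the origin, and beale reaches 0 at (3, 0.5).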
-
-  def manipulator(self):
-    manipulator = ConfigurationManipulator()
-    for d in xrange(self.args.dimensions):
-      manipulator.add_parameter(FloatParameter(d,
-                                               -self.args.domain,
-                                               self.args.domain))
-    return manipulator
-
-  def program_name(self):
-    return self.args.function
-
-  def program_version(self):
-    return "%dx%d" % (self.args.dimensions, self.args.domain)
-
-  def save_final_config(self, configuration):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-
-
-if __name__ == '__main__':
-  args = parser.parse_args()
-  if args.function == 'beale':
-    # fixed for this function
-    args.domain = 4.5
-    args.dimensions = 2
-  Rosenbrock.main(args)
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/att48_d.txt b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/att48_d.txt
deleted file mode 100644
index b93e36ccfa194c574fd9473921fcee2d6820015c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/att48_d.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-         0      4727      1205      6363      3657      3130      2414       563       463      5654      1713      1604      2368      2201      1290      1004      3833      2258      3419      2267      2957       720      1700      5279      2578      6076      3465      2654      3625      3115      1574      3951      1748      2142      6755      2383      3306      1029      3530       825      2188      4820      3489      1947      6835      1542      2379      3744
-      4727         0      3588      2012      1842      6977      6501      5187      5028      2327      4148      4723      3635      3125      4907      3930      7463      6338      7243      5105      4043      4022      3677      2863      3106      1850      7173      6630      1204      6814      6001      3447      5253      2656      3123      6274      7183      5622      3085      4564      2756      1591      7027      6186      3472      5461      4390      2088
-      1205      3588         0      5163      2458      3678      3071      1742      1444      4462      1184      1520      1498      1103      1501       951      4298      2903      3967      2169      2209       652       828      4136      1518      4873      3954      3254      2446      3581      2441      2960      1966       950      5564      2916      3878      2035      2482      1027      1395      3617      3891      2686      5661      2023      1867      2560
-      6363      2012      5163         0      2799      8064      7727      6878      6581      1402      5366      5946      4679      4378      6225      5709      8417      7578      8296      6135      4802      5707      4982      2322      4178       320      8186      7800      2778      7859      7408      3763      6461      4223      1427      7451      8263      7131      3669      6011      4638      1681      7987      7502      1877      6758      5360      2844
-      3657      1842      2458      2799         0      5330      4946      4200      3824      2012      2573      3157      1924      1580      3427      3179      5749      4793      5577      3409      2223      3066      2185      1860      1401      2491      5486      5035       894      5141      4611      1669      3677      1590      3113      4682      5533      4352      1252      3227      2426      1169      5313      4706      3241      3962      2651       304
-      3130      6977      3678      8064      5330         0       743      3209      2670      6929      2831      2266      3407      3854      2178      4076       727       881       293      1930      3310      3672      3315      6199      3932      7745       365       482      5774       261      1659      4513      1746      4431      7910       769       207      2225      4435      2681      5053      6384       550      1224      7805      1670      2704      5230
-      2414      6501      3071      7727      4946       743         0      2468      1952      6673      2380      1795      3051      3405      1604      3382      1469       168      1020      1681      3110      2993      2827      6009      3552      7412      1104       267      5300       821       916      4348      1270      3890      7698       332       900      1484      4185      2049      4415      6051      1219       482      7635      1054      2432      4884
-       563      5187      1742      6878      4200      3209      2468         0       718      6203      2241      2051      2920      2762      1687      1304      3932      2331      3487      2669      3487      1175      2260      5840      3141      6596      3563      2728      4120      3240      1559      4507      2082      2658      7304      2512      3364       985      4091      1319      2544      5358      3632      1987      7391      1785      2879      4296
-       463      5028      1444      6581      3824      2670      1952       718         0      5789      1602      1343      2330      2291       970      1451      3376      1796      2959      1951      2835      1112      1725      5346      2628      6285      3007      2193      3889      2661      1122      3920      1372      2391      6883      1927      2845       611      3543       676      2590      4993      3039      1486      6934      1112      2196      3876
-      5654      2327      4462      1402      2012      6929      6673      6203      5789         0      4392      4947      3648      3501      5274      5183      7216      6535      7140      5022      3621      5077      4090       922      3207      1131      7014      6714      2437      6707      6477      2476      5432      3599      1102      6376      7121      6284      2497      5160      4318       937      6795      6507      1268      5773      4249      1914
-      1713      4148      1184      5366      2573      2831      2380      2241      1602      4392         0       586       766      1029       883      2040      3353      2224      3100      1049      1246      1625       503      3841      1196      5054      3042      2488      2945      2676      2087      2331      1114      1650      5459      2132      3037      1958      1997       931      2513      3701      2923      2137      5459      1394       711      2534
-      1604      4723      1520      5946      3157      2266      1795      2051      1343      4947       586         0      1299      1612       406      2208      2824      1639      2542       694      1586      1767      1050      4357      1770      5633      2498      1907      3520      2128      1558      2778       531      2171      6003      1552      2472      1538      2506       791      2912      4277      2403      1564      5983       827       892      3109
-      2368      3635      1498      4679      1924      3407      3051      2920      2330      3648       766      1299         0       646      1642      2446      3840      2905      3655      1488       730      2096       697      3076       533      4363      3567      3122      2453      3219      2842      1592      1791      1480      4706      2772      3610      2721      1232      1656      2550      3001      3403      2860      4697      2126       756      1836
-      2201      3125      1103      4378      1580      3854      3405      2762      2291      3501      1029      1612       646         0      1853      2026      4349      3247      4119      1997      1341      1753       606      3078       419      4070      4052      3517      1923      3690      3032      1866      2142       838      4593      3161      4060      2788      1380      1663      1932      2736      3915      3138      4647      2395      1351      1592
-      1290      4907      1501      6225      3427      2178      1604      1687       970      5274       883       406      1642      1853         0      2029      2803      1438      2466       986      1987      1593      1253      4716      2072      5915      2454      1764      3710      2082      1204      3164       497      2287      6342      1419      2379      1134      2867       554      2885      4569      2405      1289      6338       555      1297      3406
-      1004      3930       951      5709      3179      4076      3382      1304      1451      5183      2040      2208      2446      2026      2029         0      4759      3220      4368      2900      3151       442      1765      4960      2444      5443      4396      3610      2932      4034      2572      3891      2525      1590      6278      3313      4261      2033      3398      1476      1241      4287      4390      2928      6419      2428      2749      3337
-      3833      7463      4298      8417      5749       727      1469      3932      3376      7216      3353      2824      3840      4349      2803      4759         0      1601       477      2359      3617      4345      3851      6433      4372      8098       370      1206      6267       726      2384      4754      2335      4991      8148      1452       609      2949      4752      3331      5687      6746       437      1948      8005      2334      3098      5618
-      2258      6338      2903      7578      4793       881       168      2331      1796      6535      2224      1639      2905      3247      1438      3220      1601         0      1165      1563      2988      2829      2666      5882      3401      7263      1233       399      5138       923       794      4227      1117      3724      7565       286      1049      1348      4051      1881      4248      5903      1322       355      7508       887      2302      4736
-      3419      7243      3967      8296      5577       293      1020      3487      2959      7140      3100      2542      3655      4119      2466      4368       477      1165         0      2170      3520      3965      3588      6393      4183      7977       202       767      6041       438      1932      4706      2027      4711      8107      1061       132      2503      4652      2972      5344      6617       486      1501      7989      1962      2939      5469
-      2267      5105      2169      6135      3409      1930      1681      2669      1951      5022      1049       694      1488      1997       986      2900      2359      1563      2170         0      1430      2460      1547      4333      2019      5817      2079      1694      3910      1733      1813      2668       654      2694      6029      1366      2130      1991      2525      1474      3542      4455      1923      1641      5957      1071       777      3302
-      2957      4043      2209      4802      2223      3310      3110      3487      2835      3621      1246      1586       730      1341      1987      3151      3617      2988      3520      1430         0      2779      1387      2905      1062      4482      3398      3119      2922      3087      3115      1240      1953      2175      4607      2796      3501      3119      1136      2173      3268      3136      3189      3029      4527      2355       711      2042
-       720      4022       652      5707      3066      3672      2993      1175      1112      5077      1625      1767      2096      1753      1593       442      4345      2829      3965      2460      2779         0      1401      4781      2166      5427      3984      3212      2946      3620      2224      3603      2089      1496      6178      2906      3861      1719      3132      1040      1479      4211      3969      2553      6290      2012      2336      3189
-      1700      3677       828      4982      2185      3315      2827      2260      1725      4090       503      1050       697       606      1253      1765      3851      2666      3588      1547      1387      1401         0      3621       903      4675      3537      2954      2475      3169      2427      2254      1578      1148      5177      2598      3521      2194      1833      1074      2054      3340      3423      2541      5213      1801      1077      2190
-      5279      2863      4136      2322      1860      6199      6009      5840      5346       922      3841      4357      3076      3078      4716      4960      6433      5882      6393      4333      2905      4781      3621         0      2718      2042      6254      6024      2569      5966      5913      1687      4807      3384      1716      5699      6384      5787      1852      4687      4285      1272      6022      5892      1629      5178      3581      1639
-      2578      3106      1518      4178      1401      3932      3552      3141      2628      3207      1196      1770       533       419      2072      2444      4372      3401      4183      2019      1062      2166       903      2718         0      3864      4097      3635      1932      3748      3274      1448      2284      1164      4286      3283      4136      3086       967      1973      2285      2507      3935      3331      4312      2589      1284      1340
-      6076      1850      4873       320      2491      7745      7412      6596      6285      1131      5054      5633      4363      4070      5915      5443      8098      7263      7977      5817      4482      5427      4675      2042      3864         0      7866      7483      2515      7539      7101      3449      6146      3938      1375      7134      7944      6831      3349      5709      4397      1363      7667      7190      1798      6446      5041      2528
-      3465      7173      3954      8186      5486       365      1104      3563      3007      7014      3042      2498      3567      4052      2454      4396       370      1233       202      2079      3398      3984      3537      6254      4097      7866         0       839      5973       374      2019      4569      1996      4669      7970      1085       305      2581      4532      2976      5339      6509       287      1581      7844      1974      2838      5369
-      2654      6630      3254      7800      5035       482       267      2728      2193      6714      2488      1907      3122      3517      1764      3610      1206       399       767      1694      3119      3212      2954      6024      3635      7483       839         0      5427       558      1181      4349      1377      4044      7723       356       653      1744      4218      2241      4614      6121       955       743      7644      1231      2465      4957
-      3625      1204      2446      2778       894      5774      5300      4120      3889      2437      2945      3520      2453      1923      3710      2932      6267      5138      6041      3910      2922      2946      2475      2569      1932      2515      5973      5427         0      5612      4824      2550      4050      1498      3476      5071      5980      4470      2096      3388      1911      1501      5831      4994      3704      4264      3209      1196
-      3115      6814      3581      7859      5141       261       821      3240      2661      6707      2676      2128      3219      3690      2082      4034       726       923       438      1733      3087      3620      3169      5966      3748      7539       374       558      5612         0      1716      4280      1624      4298      7679       735       420      2263      4216      2606      4967      6179       400      1277      7567      1609      2501      5032
-      1574      6001      2441      7408      4611      1659       916      1559      1122      6477      2087      1558      2842      3032      1204      2572      2384       794      1932      1813      3115      2224      2427      5913      3274      7101      2019      1181      4824      1716         0      4330      1180      3346      7545      1023      1808       578      4062      1438      3693      5763      2115       440      7537       763      2404      4603
-      3951      3447      2960      3763      1669      4513      4348      4507      3920      2476      2331      2778      1592      1866      3164      3891      4754      4227      4706      2668      1240      3603      2254      1687      1448      3449      4569      4349      2550      4280      4330         0      3184      2510      3402      4031      4698      4281       533      3245      3612      2187      4339      4265      3296      3576      1941      1381
-      1748      5253      1966      6461      3677      1746      1270      2082      1372      5432      1114       531      1791      2142       497      2525      2335      1117      2027       654      1953      2089      1578      4807      2284      6146      1996      1377      4050      1624      1180      3184         0      2685      6475      1022      1952      1341      2963      1050      3358      4787      1926      1086      6436       422      1244      3619
-      2142      2656       950      4223      1590      4431      3890      2658      2391      3599      1650      2171      1480       838      2287      1590      4991      3724      4711      2694      2175      1496      1148      3384      1164      3938      4669      4044      1498      4298      3346      2510      2685         0      4697      3693      4636      2975      1981      1909      1124      2718      4565      3548      4830      2839      2140      1751
-      6755      3123      5564      1427      3113      7910      7698      7304      6883      1102      5459      6003      4706      4593      6342      6278      8148      7565      8107      6029      4607      6178      5177      1716      4286      1375      7970      7723      3476      7679      7545      3402      6475      4697         0      7393      8097      7370      3515      6249      5379      2001      7738      7556       461      6829      5267      3013
-      2383      6274      2916      7451      4682       769       332      2512      1927      6376      2132      1552      2772      3161      1419      3313      1452       286      1061      1366      2796      2906      2598      5699      3283      7134      1085       356      5071       735      1023      4031      1022      3693      7393         0       965      1542      3883      1913      4286      5772      1121       600      7322       902      2128      4608
-      3306      7183      3878      8263      5533       207       900      3364      2845      7121      3037      2472      3610      4060      2379      4261       609      1049       132      2130      3501      3861      3521      6384      4136      7944       305       653      5980       420      1808      4698      1952      4636      8097       965         0      2380      4629      2877      5250      6583       570      1380      7986      1866      2904      5432
-      1029      5622      2035      7131      4352      2225      1484       985       611      6284      1958      1538      2721      2788      1134      2033      2949      1348      2503      1991      3119      1719      2194      5787      3086      6831      2581      1744      4470      2263       578      4281      1341      2975      7370      1542      2380         0      3952      1127      3197      5518      2658      1002      7395       951      2429      4380
-      3530      3085      2482      3669      1252      4435      4185      4091      3543      2497      1997      2506      1232      1380      2867      3398      4752      4051      4652      2525      1136      3132      1833      1852       967      3349      4532      4218      2096      4216      4062       533      2963      1981      3515      3883      4629      3952         0      2873      3080      2012      4324      4046      3478      3328      1755      1000
-       825      4564      1027      6011      3227      2681      2049      1319       676      5160       931       791      1656      1663       554      1476      3331      1881      2972      1474      2173      1040      1074      4687      1973      5709      2976      2241      3388      2606      1438      3245      1050      1909      6249      1913      2877      1127      2873         0      2374      4392      2943      1659      6285      1012      1563      3254
-      2188      2756      1395      4638      2426      5053      4415      2544      2590      4318      2513      2912      2550      1932      2885      1241      5687      4248      5344      3542      3268      1479      2054      4285      2285      4397      5339      4614      1911      4967      3693      3612      3358      1124      5379      4286      5250      3197      3080      2374         0      3386      5284      3997      5585      3386      3125      2664
-      4820      1591      3617      1681      1169      6384      6051      5358      4993       937      3701      4277      3001      2736      4569      4287      6746      5903      6617      4455      3136      4211      3340      1272      2507      1363      6509      6121      1501      6179      5763      2187      4787      2718      2001      5772      6583      5518      2012      4392      3386         0      6314      5837      2205      5095      3680      1169
-      3489      7027      3891      7987      5313       550      1219      3632      3039      6795      2923      2403      3403      3915      2405      4390       437      1322       486      1923      3189      3969      3423      6022      3935      7667       287       955      5831       400      2115      4339      1926      4565      7738      1121       570      2658      4324      2943      5284      6314         0      1676      7603      1964      2662      5184
-      1947      6186      2686      7502      4706      1224       482      1987      1486      6507      2137      1564      2860      3138      1289      2928      1948       355      1501      1641      3029      2553      2541      5892      3331      7190      1581       743      4994      1277       440      4265      1086      3548      7556       600      1380      1002      4046      1659      3997      5837      1676         0      7521       744      2325      4670
-      6835      3472      5661      1877      3241      7805      7635      7391      6934      1268      5459      5983      4697      4647      6338      6419      8005      7508      7989      5957      4527      6290      5213      1629      4312      1798      7844      7644      3704      7567      7537      3296      6436      4830       461      7322      7986      7395      3478      6285      5585      2205      7603      7521         0      6805      5208      3102
-      1542      5461      2023      6758      3962      1670      1054      1785      1112      5773      1394       827      2126      2395       555      2428      2334       887      1962      1071      2355      2012      1801      5178      2589      6446      1974      1231      4264      1609       763      3576       422      2839      6829       902      1866       951      3328      1012      3386      5095      1964       744      6805         0      1644      3928
-      2379      4390      1867      5360      2651      2704      2432      2879      2196      4249       711       892       756      1351      1297      2749      3098      2302      2939       777       711      2336      1077      3581      1284      5041      2838      2465      3209      2501      2404      1941      1244      2140      5267      2128      2904      2429      1755      1563      3125      3680      2662      2325      5208      1644         0      2532
-      3744      2088      2560      2844       304      5230      4884      4296      3876      1914      2534      3109      1836      1592      3406      3337      5618      4736      5469      3302      2042      3189      2190      1639      1340      2528      5369      4957      1196      5032      4603      1381      3619      1751      3013      4608      5432      4380      1000      3254      2664      1169      5184      4670      3102      3928      2532         0
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_d.txt b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_d.txt
deleted file mode 100644
index 0464ad3143b4dff3176414a0b343f762ae5379b7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_d.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-         0        29        82        46        68        52        72        42        51        55        29        74        23        72        46
-        29         0        55        46        42        43        43        23        23        31        41        51        11        52        21
-        82        55         0        68        46        55        23        43        41        29        79        21        64        31        51
-        46        46        68         0        82        15        72        31        62        42        21        51        51        43        64
-        68        42        46        82         0        74        23        52        21        46        82        58        46        65        23
-        52        43        55        15        74         0        61        23        55        31        33        37        51        29        59
-        72        43        23        72        23        61         0        42        23        31        77        37        51        46        33
-        42        23        43        31        52        23        42         0        33        15        37        33        33        31        37
-        51        23        41        62        21        55        23        33         0        29        62        46        29        51        11
-        55        31        29        42        46        31        31        15        29         0        51        21        41        23        37
-        29        41        79        21        82        33        77        37        62        51         0        65        42        59        61
-        74        51        21        51        58        37        37        33        46        21        65         0        61        11        55
-        23        11        64        51        46        51        51        33        29        41        42        61         0        62        23
-        72        52        31        43        65        29        46        31        51        23        59        11        62         0        59
-        46        21        51        64        23        59        33        37        11        37        61        55        23        59         0
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_s.txt b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_s.txt
deleted file mode 100644
index 38afab553d2a9c23c1abda12a95f6367d5d093e2..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/p01_s.txt
+++ /dev/null
@@ -1,16 +0,0 @@
- 1
-13
- 2
-15
- 9
- 5
- 7
- 3
-12
-14
-10
- 8
- 6
- 4
-11
- 1
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/tsp.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/tsp.py
deleted file mode 100755
index 0ddff5156497331daffef9b7385a20d63423bbd0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tsp/tsp.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# This is a simple testcase purely for testing the autotuner on permutations
-#
-# http://en.wikipedia.org/wiki/Travelling_salesman_problem
-#
-
-import adddeps #fix sys.path
-
-import argparse
-import logging
-
-import opentuner
-from opentuner.search.manipulator import (ConfigurationManipulator,
-                                          PermutationParameter)
-from opentuner.search.objective import MinimizeTime
-from opentuner.measurement import MeasurementInterface
-from opentuner.measurement.inputmanager import FixedInputManager
-from opentuner.tuningrunmain import TuningRunMain
-
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-parser.add_argument('data', help='distance matrix file')
-
-class TSP(MeasurementInterface):
-    def __init__(self, args):
-        super(TSP, self).__init__(args)
-        data = args.data
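-        # each line of the data file holds one row of the symmetric
-        # city-to-city distance matrix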
-        m = open(data).readlines()
-        self.distance = [[int(i) for i in l.split()] for l in m]
-
-    def run(self, desired_result, input, limit):
-        cfg = desired_result.configuration.data
-        p = cfg[0]      # cheating: should use manipulator function
-        t = self.eval_path(p)
-        return opentuner.resultsdb.models.Result(time=t)
-
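-    # eval_path computes an open-path cost: e.g. for p = [0, 2, 1] it
-    # returns distance[0][2] + distance[2][1]; the tour is not closed
-    # back to the starting city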
-    def eval_path(self, p):
-        """ Given permutation of cities as a list of indices,
-        return total path length """
-        out = sum(self.distance[p[i]][p[i+1]] for i in range(len(p)-1))
-##        print out, p
-        return out
-
-    def manipulator(self):
-        manipulator = ConfigurationManipulator()
-        manipulator.add_parameter(PermutationParameter(0, range(len(self.distance))))
-        return manipulator
-
-    def solution(self):
-        # p01_s.txt lists cities 1-based; shift to the 0-based indices
-        # eval_path expects (index 15 would otherwise be out of range)
-        p = [c - 1 for c in [1,13,2,15,9,5,7,3,12,14,10,8,6,4,11]]
-        return self.eval_path(p)
-
-
-
-if __name__ == '__main__':
-  args = parser.parse_args()
-  TSP.main(args)
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/#accuracy_tuner.py# b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/#accuracy_tuner.py#
deleted file mode 100644
index 2110d0d692831e37f30023af05b92a0d91d9623c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/#accuracy_tuner.py#
+++ /dev/null
@@ -1,203 +0,0 @@
-#!/usr/bin/env python
-#
-# Optimize blocksize of apps/mmm_block.cpp
-#
-# This is an extremely simplified version meant only for tutorials
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy  
-  
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-    
-
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      )) #default is needed, optimizations don't work without it(tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-    """
-    Compile and run a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print run_cmd
-    run_result_call_program = self.call_program(run_cmd)
-    #print run_result_call_program
-
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-    
-    return result
-         
-
-  def save_final_config(self, configuration):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each error knob can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for storing tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner.py
deleted file mode 100644
index 5977fe7ee5b4780139d2c5a865c8231361cf0f2c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python
-#
-
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy  
-  
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
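-# opentuner_flags is the interface to the tuned binary: one knob value per
-# line, read back by the runtime on each run (see the NOTE in run() below)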
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      )) #default is needed, optimizations don't work without it(tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-    """
-    Compile and run a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print run_cmd
-    run_result_call_program = self.call_program(run_cmd)
-    #print run_result_call_program
-
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-    
-    return result
-         
-
-  def save_final_config(self, configuration):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each error knob can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for storing tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner_piped.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner_piped.py
deleted file mode 100644
index 6d46c5762ead377292337c47d045ee5e58322954..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/accuracy_tuner_piped.py
+++ /dev/null
@@ -1,269 +0,0 @@
-#!/usr/bin/env python
-#
-# Optimize blocksize of apps/mmm_block.cpp
-#
-# This is an extremely simplified version meant only for tutorials
-#
-import adddeps  # fix sys.path
-
-import argparse
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-from opentuner.search.objective import ThresholdAccuracyMinimizeTime
-from opentuner.measurement.inputmanager import FixedInputManager
-import shutil
-import os
-import sys
-import subprocess
-import threading
-import psutil
-
-from measure_confidence import dump_high_confidence_files
-from select_top_results import select_top_results
-from time import sleep
-
-
-output_dir = ""
-flag_ranges = []
-tuning_flags = []
-binary_name = ""
-accuracy_threshold = 10.0
-opt_confs_index = 9
-evaluated_configs = {}
-orig_result_dir = ""
-
-
-def extractTotalOverhead(file_name):
-
-  total_comps = 0.0
-  file = open(file_name, "r")
-  for x in file:
-    words = x.split()
-    total_comps += float(words[opt_confs_index])
-  
-  print total_comps 
-  return total_comps
-
-
-def getAccuracy(file_name):
-  
-  file = open(file_name, "r")
-  acc_str = file.read()
-  file.close()
-
-  try:
-    accuracy = float(acc_str)
-  except ValueError:
-    # fall back to a fixed value if the accuracy file cannot be parsed
-    return 20
-
-  print accuracy
-  return accuracy
-
-
-
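-# kill the children of the process first so that terminating the
-# persistent binary does not leave orphaned subprocesses behind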
-def kill(proc_pid):
-  process = psutil.Process(proc_pid)
-  for proc in process.children(recursive=True):
-    proc.kill()
-  process.kill()
-    
-
-
-def createFlagsFile(file_name, cfg):
-
-  f = open(file_name, "w+")
-  cmd_config = ""
-  for flag in tuning_flags:
-    flag_value = cfg[flag]
-    cmd_config += str(flag_value) + "\n"
-    
-  f.write(cmd_config)
-  f.close()
-
-
-class ClangFlagsTuner(MeasurementInterface):
-
-  def __init__(self, args):
-    objective = ThresholdAccuracyMinimizeTime(accuracy_threshold)
-    input_manager = FixedInputManager(size=num_flags)
-    self.configs_list = []
-
-    super(ClangFlagsTuner, self).__init__(
-        args, program_name=args.binary,
-        program_version=self.file_hash(args.binary),
-        input_manager=input_manager, objective=objective)
-
-
-    FNULL = open(os.devnull, 'wb')
-    #run_result_call_program = self.call_program(run_cmd)
-    self.start_process = subprocess.Popen([binary_name, "opentuner_run"]) #,  stdout=FNULL);
-
-    try:
-      os.mkfifo("/tmp/myfifo")
-    except OSError, e:
-      print("FIFO exists")
-
-    
-
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag, flag_ranges
-                      # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
-                      )) #default is needed, optimizations don't work without it(tried and tested)
-    return manipulator
-
-  
-  def run(self, desired_result, input, limit):
-
-    """
-    Run  a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    
-    # NOTE: creates the file with flags read by the runtime
-    createFlagsFile("opentuner_flags", cfg)
-    
-    run_cmd = binary_name
-    print run_cmd
-    #run_result_call_program = self.call_program(run_cmd)      
-  
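-    # Handshake protocol: write "start_run" into /tmp/myfifo to ask the
-    # persistent DNN process to evaluate the flags in opentuner_flags,
-    # then block on a second open/read until it signals completion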
-    # Using Named Pipes to signal execution to the DNN outer thread
-    fifo = open("/tmp/myfifo", "w")
-    fifo.write("start_run")
-    fifo.close()
-
-    print "Waiting for process to signal back - when done processing one run"
-
-    fifo2 = open("/tmp/myfifo", "r")
-    fifo2.read()
-    fifo2.close()
-
-    print "Process Signalled back"
-
-    total_comps = extractTotalOverhead("accuracy_summary")
-    accuracy = getAccuracy("final_accuracy")
-
-    
-    #Result = opentuner.resultsdb.models.Result(time=total_comps)
-    result = opentuner.resultsdb.models.Result()
-    result.time = total_comps
-    result.accuracy = accuracy
-
-    if accuracy > accuracy_threshold:
-      if accuracy not in evaluated_configs:
-        config_tuple = (total_comps, accuracy, cfg)
-        self.configs_list.append(config_tuple)
-        evaluated_configs[accuracy] = 1
-        shutil.copy('accuracy_summary', output_dir + '/' + binary_name + '_' + str(accuracy))
-
-        
-    print "done with one run"
-    
-    return result
-
-
-  def save_final_config(self, configuration):
-
-    print "Dumping High Confidence results"
-    sleep(5)
-    
-    # Only dumping files with 95% confidence
-    dump_high_confidence_files(binary_name, orig_result_dir, accuracy_threshold, 95)
-    select_top_results(orig_result_dir + "/high_confidence")
-
-    
-    #self.start_process.kill()
-    kill(self.start_process.pid)
-    
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    print "Final configuration", configuration.data
-
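-    # NOTE: the early return below makes the re-run/ranking code that
-    # follows unreachable; it is kept only for reference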
-    return
-
-    
-    if not os.path.exists(result_dir):
-      os.mkdir(result_dir)
-    
-    createFlagsFile("opentuner_flags", configuration.data)
-    run_cmd = binary_name
-    run_result_call_program = self.call_program(run_cmd)
-
-    accuracy = getAccuracy("final_accuracy")
-    shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_final_' + str(accuracy) )
-
-    sorted_list = sorted(self.configs_list, key = lambda tup: tup[0])
-    print sorted_list[0:10]
-    
-    top_elems = 20
-    if len(sorted_list) < top_elems:
-      top_elems = len(sorted_list)
-
-      
-    for i in range(top_elems):
-      createFlagsFile("opentuner_flags", sorted_list[i][2])
-      run_cmd = binary_name
-      run_result_call_program = self.call_program(run_cmd)
-      accuracy = getAccuracy("final_accuracy")
-      shutil.copy('accuracy_summary', result_dir + '/' + binary_name + '_' + str(accuracy) + "_rank_" + str(i) )
-
-
-    #os.mkdir(result_dir + "full_results")
-  
-    
-
-
-if __name__ == '__main__':
-
-  argparser = argparse.ArgumentParser(parents=opentuner.argparsers())
-  argparser.add_argument('--binary', help='name of binary to run')
-  argparser.add_argument('--num-flags', type=int, help='num of flags to tune for')
-  argparser.add_argument('--error-range', type=int, help='number of values each error knob can take')
-  argparser.add_argument('--accuracy', type=float, help='accuracy threshold')
-  argparser.add_argument('--result-dir', help='directory for storing tuning results')
-
-  
-  args = argparser.parse_args()
-  binary_name = str(args.binary)
-  print("binary_name = ", binary_name)
-  num_flags = int(args.num_flags)
-  error_range = int(args.error_range)
-  accuracy_threshold = float(args.accuracy)
-  print("accuracy = ", accuracy_threshold)
-  result_dir = args.result_dir
-  orig_result_dir = result_dir
-  if not result_dir:
-    print("Provide --result-dir")
-    sys.exit(1)
-
-
-  output_dir = result_dir + "/full_results"
-  print output_dir
-  if not os.path.exists(result_dir):
-    os.mkdir(result_dir)
-    
-  if not os.path.exists(output_dir):
-    print("Creating output directory = ", output_dir)
-    os.mkdir(output_dir)
-
-  for j in range(error_range):
-    flag_ranges.append(j)
-
-  print("flag_ranges = ", flag_ranges)
-  
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-  
-  ClangFlagsTuner.main(args)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/adddeps.py
deleted file mode 100644
index 72de04cf55e138a5ee5d0fdaf11da4b692045706..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/adddeps.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/gettingstarted.md b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/gettingstarted.md
deleted file mode 100644
index 8a442c5f44d6c501f686125d4468ca642f745920..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/gettingstarted.md
+++ /dev/null
@@ -1,215 +0,0 @@
----
-layout: default
-title: OpenTuner - Using OpenTuner
-permalink: /tutorial/gettingstarted/index.html
----
-
-Tutorial: Optimizing Block Matrix Multiplication
-================================================
-
-This tutorial assumes that you have checked out a copy of opentuner. For
-guidelines on how to get opentuner set up, refer [here][setup].
-
-[setup]: http://opentuner.org/tutorial/setup/
-
-Identifying a Program to Autotune
----------------------------------
-
-In order to do autotuning, you first need something to autotune. This will
-normally be your own program that you want to make faster or better in some
-other way. For this tutorial we will use a blocked version of matrix multiply
-as an example. We will use OpenTuner to find the optimal value of the block
-size parameter.
-
-We will autotune the sample code below (adapted from the code found
-[here][matrix-multiply-code]), making sure to pass the block size to the
-program as a compile-time constant.
-
-[matrix-multiply-code]: http://csapp.cs.cmu.edu/public/waside/waside-blocking.pdf
-
-Save the sample code below to examples/tutorials/mmm_block.cpp
-
-    #include <stdio.h>
-    #include <cstdlib>
-
-    #define N 100
-    
-    int main(int argc, const char** argv)
-    {
-    
-      int n = BLOCK_SIZE * (N/BLOCK_SIZE);
-      int a[N][N];
-      int b[N][N];
-      int c[N][N];
-      int sum=0;
-      for(int i1=0;i1<n;i1+=BLOCK_SIZE)
-      {
-          for(int j1=0;j1<n;j1+=BLOCK_SIZE)
-          {
-              for(int k1=0;k1<n;k1+=BLOCK_SIZE)
-              {
-                  for(int i=i1;i<i1+BLOCK_SIZE;i++)
-                  {
-                      for(int j=j1;j<j1+BLOCK_SIZE;j++)
-                      {
-                          sum = c[i][j];
-                          for(int k=k1;k<k1+BLOCK_SIZE;k++)
-                          {
-                              sum += a[i][k] * b[k][j];
-                          }
-                          c[i][j] = sum;
-                      }
-                  }
-              }
-          }
-      }
-      return 0;
-    }
-
-Creating a New Autotuner with Opentuner
-------------------------------------
-Now we need to create a program that uses OpenTuner to optimize the program we just saved.
-
-Save the following code to examples/tutorials/mmm_tuner.py
-
-    #!/usr/bin/env python
-    #
-    # Optimize blocksize of apps/mmm_block.cpp
-    #
-    # This is an extremely simplified version meant only for tutorials
-    #
-    import adddeps  # fix sys.path
-
-    import opentuner
-    from opentuner import ConfigurationManipulator
-    from opentuner import IntegerParameter
-    from opentuner import MeasurementInterface
-    from opentuner import Result
-
-
-    class GccFlagsTuner(MeasurementInterface):
-
-      def manipulator(self):
-        """
-        Define the search space by creating a
-        ConfigurationManipulator
-        """
-        manipulator = ConfigurationManipulator()
-        manipulator.add_parameter(
-          IntegerParameter('blockSize', 1, 10))
-        return manipulator
-
-      def run(self, desired_result, input, limit):
-        """
-        Compile and run a given configuration then
-        return performance
-        """
-        cfg = desired_result.configuration.data
-
-        gcc_cmd = 'g++ mmm_block.cpp '
-        gcc_cmd += '-DBLOCK_SIZE=' + str(cfg['blockSize'])
-        gcc_cmd += ' -o ./tmp.bin'
-
-        compile_result = self.call_program(gcc_cmd)
-        assert compile_result['returncode'] == 0
-
-        run_cmd = './tmp.bin'
-
-        run_result = self.call_program(run_cmd)
-        assert run_result['returncode'] == 0
-
-        return Result(time=run_result['time'])
-
-      def save_final_config(self, configuration):
-        """called at the end of tuning"""
-        print "Optimal block size written to mmm_final_config.json:", configuration.data
-        self.manipulator().save_to_file(configuration.data,
-                                        'mmm_final_config.json')
-
-
-    if __name__ == '__main__':
-      argparser = opentuner.default_argparser()
-      GccFlagsTuner.main(argparser.parse_args())
-
-
-This file consists of several components, each of which will be discussed in further detail below.
-
-Tuning Programs have a general structure as follows:
-
-    from opentuner import MeasurementInterface
-    from opentuner import Result
-
-Create an instance of class GccFlagsTuner, which tunes specified parameters using OpenTuner.
-
-    class GccFlagsTuner(MeasurementInterface):
-
-The manipulator method defines the search space by specifying the parameters that should be tuned by this instance of GccFlagsTuner.
-
-    def manipulator(self):
-      """
-      Define the search space by creating a
-      ConfigurationManipulator
-      """
-      manipulator = ConfigurationManipulator()
-      manipulator.add_parameter(
-        IntegerParameter('blockSize', 1, 10))
-      return manipulator
-
-The run method compiles and runs the program under a given configuration and returns the measured performance. In this example, the blockSize parameter being tuned is passed to the compiler as a compile-time constant that takes on a value within the specified range on each run. However, OpenTuner also supports other ways of specifying such parameters that may be preferred in different use cases.
-
-    def run(self, desired_result, input, limit):
-      """
-      Compile and run a given configuration then
-      return performance
-      """
-      cfg = desired_result.configuration.data
-
-      gcc_cmd = 'g++ mmm_block.cpp '
-      gcc_cmd += '-DBLOCK_SIZE=' + str(cfg['blockSize'])
-      gcc_cmd += ' -o ./tmp.bin'
-
-      compile_result = self.call_program(gcc_cmd)
-      assert compile_result['returncode'] == 0
-
-      run_cmd = './tmp.bin'
-
-      run_result = self.call_program(run_cmd)
-      assert run_result['returncode'] == 0
-
-      return Result(time=run_result['time'])
-
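-As used above, the call_program helper runs the given shell command and
-returns a dictionary of measurement results, including the 'returncode' and
-measured 'time' that we feed into the returned Result object.
-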
-We can display the result of running OpenTuner (the optimal block size for our multiplication problem) by adding a save_final_config() method to our class. This saves a JSON dictionary of the optimal blockSize parameter found to the file mmm_final_config.json.
-
-    def save_final_config(self, configuration):
-      """called at the end of tuning"""
-      print "Optimal block size written to mmm_final_config.json:", configuration.data
-      self.manipulator().save_to_file(configuration.data,
-                                      'mmm_final_config.json')
-
-    if __name__ == '__main__':
-      argparser = opentuner.default_argparser()
-      GccFlagsTuner.main(argparser.parse_args())
-
-Generating and Viewing Results
-------------------------------
-
-Run the following command to autotune our program (the --no-dups flag hides warnings about duplicate results, and the --stop-after parameter limits the tuning run to a maximum of 30 seconds):
-
-    python mmm_tuner.py --no-dups --stop-after=30
-
-The results of each run configuration will be displayed as follows (output lines are truncated for readability here):
-
-    [    10s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    19s]    INFO opentuner.search.metatechniques: AUCBanditMetaTechniqueA: [('DifferentialEvolutionAlt', 477), ('UniformGreedyMutation', 18), ('NormalGreedyMutation', 5), ('RandomNelderMead', 1)]
-    [    20s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    30s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    [    30s]    INFO opentuner.search.plugin.DisplayPlugin: tests=10, best {'BLOCK_SIZE': 4}, cost time=0.0081, found by DifferentialEvolutionAlt[...]
-    Optimal block size written to mmm_final_config.json: {'BLOCK_SIZE': 4}
-
-
-Look up the optimal block size value by inspecting the generated file:
-
-    mmm_final_config.json
-
-In this example, the output file content was as follows:
-
-    {'BLOCK_SIZE': 4}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/measure_confidence.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/measure_confidence.py
deleted file mode 100644
index 655bdb024f72f0fd47807b5aa2696f9fb89b40e6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/measure_confidence.py
+++ /dev/null
@@ -1,164 +0,0 @@
-
-import argparse
-import os
-import sys
-
-
-def getAccuracy(file_name):
-
-  if not os.path.exists(file_name):
-    print("final_accuracy file not found")
-    sys.exit(1)
-    
-  file = open(file_name, "r")
-  acc_str = file.read()
-  accuracy = float(acc_str)
-  print accuracy
-  return accuracy  
-
-
-total_runs = 100.0
-skip_lines = 0
-
-
-def test_func():
-  print "test_func"
-  sys.exit(0)
-
-
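-# confidence here is the percentage of repeated runs whose accuracy stays
-# above accuracy_threshold; each run is triggered through the same
-# /tmp/myfifo handshake used by the piped tuner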
-def do_multiple_runs(binary_name, accuracy_threshold, confidence_threshold):
-
-  #total_runs = 100.0
-  successful_runs = 0.0
-  total_acc = 0
-
-  for i in range(int(total_runs)):
-
-    fifo = open("/tmp/myfifo", "w")
-    fifo.write("start_run")
-    fifo.close()
-
-    print "Waiting for process to signal back - when done processing one run"
-
-    fifo2 = open("/tmp/myfifo", "r")
-    fifo2.read()
-    fifo2.close()
-
-    print "Process Signalled back"
-
-    accuracy = getAccuracy("final_accuracy")
-    total_acc += accuracy
-
-    if accuracy > accuracy_threshold:
-      successful_runs += 1
-
-  confidence = (successful_runs / total_runs) * 100.0    
-  print("confidence = ", confidence)    
-  avg_acc = total_acc / total_runs
-  print("average accuracy = ", avg_acc)
-
-  return confidence, avg_acc
-  
-
-def compute_confidence(binary_name, accuracy, confidence, result_dir, output_dir):
-
-  confidence_list = []
-  
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  for file_name in file_names:
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-    
-    f = open(result_dir + "/" + file_name)
-    tuner_file = open("opentuner_flags", "w+")
-
-    index = 0
-    results_str = ""
-    for x in f:
-      if index >= skip_lines:
-        error_knob = int(float(x.split()[1]))
-        print error_knob
-        tuner_file.write(str(error_knob) + "\n")
-
-      results_str += x
-      index += 1
-      
-    tuner_file.close()
-    
-    run_confidence, avg_accuracy = do_multiple_runs(binary_name, accuracy, confidence)
-
-    if run_confidence > confidence:
-      f2 = open(output_dir + "/" + file_name, "w+")
-      f2.write("total_runs=" + str(total_runs) + "\t confidence=" + str(run_confidence) + "\t avg_accuracy=" + str(avg_accuracy) + "\n")
-      f2.write(results_str)
-      f2.close()
-
-    conf_result = (run_confidence, avg_accuracy, file_name)
-    confidence_list.append(conf_result) 
-
-  return confidence_list
-
-
-
-def dump_high_confidence_files(binary, result_dir, accuracy, confidence):
-
-  #result_dir = args.result_dir
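-  # <result_dir>/full_results holds every configuration the tuner saved;
-  # configurations that pass the confidence check are copied into
-  # <result_dir>/high_confidence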
-  output_dir = result_dir + "/high_confidence"
-  result_dir = result_dir + "/full_results"
-
-  if not os.path.exists(output_dir):
-    os.mkdir(output_dir)
-
-    
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  print  "Dumped Confidence Summary"
-  
-
-  
-
-
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-  argparser.add_argument('--output-dir', help='Directory for storing output directory')
-  argparser.add_argument('--binary', help='Binary name to run')
-  argparser.add_argument('--accuracy', type=float,  help='Accuracy constraint')
-  argparser.add_argument('--confidence', type=float, help='Confidence threshold')
-  
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-  output_dir = args.output_dir
-  binary = args.binary
-  accuracy = args.accuracy
-  confidence = args.confidence
-
-  confidence_list = compute_confidence(binary, accuracy, confidence, result_dir, output_dir)
-  #print confidence_list
-
-  sorted_list = sorted(confidence_list, key = lambda tup: tup[0], reverse=True)
-   
-  output_file = open(output_dir + "/confidence_summary.txt", "w+")
-  for x in sorted_list:
-    output_file.write(str(x[0]) + "\t" + str(x[1]) + "\t" + str(x[2]) + "\n")    
-
-  output_file.close()
-  
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_block.cpp b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_block.cpp
deleted file mode 100755
index 0bb76845f8d6653d1c90a0a5b387e75c46e18233..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_block.cpp
+++ /dev/null
@@ -1,36 +0,0 @@
-#include <stdio.h>
-#include <cstdlib>
-
-#define N 100
-
-int main(int argc, const char** argv)
-{
-
-  int n = BLOCK_SIZE * (N/BLOCK_SIZE);
-  int a[N][N];
-  int b[N][N];
-  int c[N][N];
-  int sum=0;
-  for(int i1=0;i1<n;i1+=BLOCK_SIZE)
-  {
-      for(int j1=0;j1<n;j1+=BLOCK_SIZE)
-      {
-          for(int k1=0;k1<n;k1+=BLOCK_SIZE)
-          {
-              for(int i=i1;i<i1+BLOCK_SIZE;i++)
-              {
-                  for(int j=j1;j<j1+BLOCK_SIZE;j++)
-                  {
-                      sum = c[i][j];
-                      for(int k=k1;k<k1+BLOCK_SIZE;k++)
-                      {
-                          sum += a[i][k] * b[k][j];
-                      }
-                      c[i][j] = sum;
-                  }
-              }
-          }
-      }
-  }
-  return 0;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_tuner.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_tuner.py
deleted file mode 100644
index f92c4c3bfc9640514e4879b1e46480613015c207..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/mmm_tuner.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/env python
-#
-# Tune flag values passed to a target binary
-#
-# This is an extremely simplified version meant only for tutorials
-#
-import adddeps  # fix sys.path
-
-import opentuner
-from opentuner import ConfigurationManipulator
-from opentuner import MeasurementInterface
-from opentuner import Result
-from opentuner import EnumParameter
-import os
-import sys
-
-
-tuning_flags = []
-
-binary_name = ""
-
-
-class ClangFlagsTuner(MeasurementInterface):
-  def manipulator(self):
-    """
-    Define the search space by creating a
-    ConfigurationManipulator
-    """
-    manipulator = ConfigurationManipulator()
-    for flag in tuning_flags:
-      manipulator.add_parameter(
-        EnumParameter(flag,
-                      [0, 1, 2, 3, 4, 5, 6])) #default is needed, optimizations don't work without it(tried and tested)
-    return manipulator
-
-  def compile(self, cfg, id):
-    """
-    Compile a given configuration in parallel
-    """
-    cmd_config = ""
-    for flag in tuning_flags:
-      flag_value = cfg[flag]
-      cmd_config += " " + flag_value 
-
-    run_cmd = binary_name + cmd_config    
-    return self.call_program(run_cmd)
-
-  def run_precompiled(self, desired_result, input, limit, compile_result, id):
-    """
-    Run a compile_result from compile() sequentially and return performance
-    """
-    # NOTE: binary_filename, output_filename and getFileSize() are not
-    # defined in this file; they are assumed to be provided elsewhere
-    run_result_call_program = self.call_program(binary_filename.format(id))
-    run_result_getFileSize = self.getFileSize(output_filename)
-    self.store_size_list(run_result_getFileSize)
-    return Result(size=run_result_getFileSize['binary_size'],
-                  time=run_result_call_program['time'])
-
-  def run(self, desired_result, input, limit):
-    """
-    Compile and run a given configuration then
-    return performance
-    """
-    cfg = desired_result.configuration.data
-    self.store_config_list(cfg)
-    compile_result = self.compile(cfg, 0)
-    return self.run_precompiled(desired_result, input, limit, compile_result, 0)
-
-  list_size = [] # list of file sizes
-  list_config = [] #list of configurations
-  list_size_config = [] #list of file size with corresponding optimization
-  list_N_size_config=[]
-
-  def store_size_list(self, binary_size):
-    """stores file size in a list"""
-    self.list_size.append(binary_size)
-
-  def store_config_list(self,cfg):
-    """stores configurations in a list"""
-    self.list_config.append(cfg)
-
-  def save_final_config(self, configuration):
-    """saves the list of file sizes with their corresponding optimizations to a file"""
-    for size_entry, config in zip(self.list_size, self.list_config):
-      self.list_size_config.append({size_entry['binary_size']: config})
-    self.list_size_config.sort()
-    self.extract_topN_results(10)
-    print "All file sizes along with corresponding configurations written to size_config.json"
-    self.manipulator().save_to_file(self.list_size_config,
-                                    'size_config.json')
-
-  def extract_topN_results(self, N):
-    """extracts the top N results w.r.t. size (the caller currently passes N=10)"""
-    self.list_N_size_config.extend(self.list_size_config[:N])
-    print "Top " + str(N) + " file sizes along with corresponding configurations written to TopN_size_config.json"
-    self.manipulator().save_to_file(self.list_N_size_config,
-                                    'TopN_size_config.json')
-
-
-
-if __name__ == '__main__':
-
-  binary_name = sys.argv[1]
-  num_flags = int(sys.argv[2])
-
-  for i in range(num_flags):
-    tuning_flags.append("flag" + str(i))
-
-  print tuning_flags  
-    
-  argparser = opentuner.default_argparser()
-  ClangFlagsTuner.main(argparser.parse_args())
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/select_top_results.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/select_top_results.py
deleted file mode 100644
index 7ee878e5f8f84f3f56ea982c1f933b2c1a5b914b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/tutorials/select_top_results.py
+++ /dev/null
@@ -1,101 +0,0 @@
-
-
-import argparse
-import sys
-import os
-
-
-log_index = 9
-linear_index = 10
-quad_index = 11
-
-top_k = 10
-skip_lines = 1
-
-
-def dump_results(sorted_list, k, result_dir, sub_dir):
-
-  ref_dir = result_dir + "/" + sub_dir
-  if not os.path.exists(ref_dir):
-    os.mkdir(ref_dir)
-  
-  for i in range(min(k, len(sorted_list)) ):
-    file_name = sorted_list[i][1]
-    file_name = ref_dir + "/" + file_name + "_rank_" + str(i)
-    f = open(file_name, "w+")
-    f.write(str(sorted_list[i][2]) + "\t")
-    f.write(str(sorted_list[i][3]) + "\t")
-    f.write(str(sorted_list[i][4]) + "\n")
-    f.write(sorted_list[i][0])
-    f.close()
-
-    
-    
-
-def select_top_results(result_dir):
-
-  if not os.path.exists(result_dir):
-    print("Path does not exist")
-    sys.exit(0)
-
-  file_names = os.listdir(result_dir)
-  print file_names
-
-  results_arr = []
-  
-  for file_name in file_names:
-
-    if file_name == "confidence_summary.txt":
-      continue
-    
-    # Skip sub-directories
-    if os.path.isdir(result_dir + "/" + file_name):
-      continue
-
-    log_result = 0.0
-    linear_result = 0.0
-    quad_result = 0.0
-    file_str = ""
-    
-    index = 0
-    f = open(result_dir + "/" + file_name)
-    for x in f:
-      if index >= skip_lines:
-        words = x.split()
-        log_result += float(words[log_index])
-        linear_result += float(words[linear_index])
-        quad_result += float(words[quad_index])
-        file_str += x 
-
-      index += 1
-
-
-    file_result = (file_str, file_name, log_result, linear_result, quad_result)          
-    results_arr.append(file_result)    
-
-    
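-  # ascending sort: configurations with the smallest accumulated
-  # log/linear/quad values are ranked first by dump_results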
-  sorted_list = sorted(results_arr, key = lambda tup: tup[2])
-  dump_results(sorted_list, top_k, result_dir, "log")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[3])
-  dump_results(sorted_list, top_k, result_dir, "linear")
-
-  sorted_list = sorted(results_arr, key = lambda tup: tup[4])
-  dump_results(sorted_list, top_k, result_dir, "quad")
-
-
-#def select_top_configuration(result_dir):
-  
-
-if __name__ == "__main__":
-
-  argparser = argparse.ArgumentParser(description='runs best configs to get high confidence on accuracy')
-  argparser.add_argument('--result-dir', help='Directory containing OpenTuner configurations')
-
-  args = argparser.parse_args()
-  result_dir = args.result_dir
-
-  select_top_results(result_dir)
-  
-
-    
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/adddeps.py
deleted file mode 100644
index ede22a8fcdb2a94db7915ff3beb90894b2cb8592..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/adddeps.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# we would prefer a symbolic link, but it does not work on windows
-import os
-target = os.path.join(os.path.dirname(__file__),
-                      '../../opentuner/utils/adddeps.py')
-execfile(target, dict(__file__=target))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/cla_func.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/cla_func.py
deleted file mode 100644
index f4787a2f23f175457ee527f8569dca39bf450605..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/cla_func.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import numpy as np
-import math
-
-
-class Op:
-  def __init__(self):
-    self.M = []
-    self.name = [];
-    self.mutation_partners = [];
-    self.anti_operator = [];
-
-    self.mutation_partners_no = []
-    self.anti_operator_no = []
-
-    # create all operators
-    self.create_operators()
-
-    # check unitarity of all operators
-    self.check_unitarity()
-
-    # determine the indices of the mutation partners
-    self.determine_index_of_mutation_partners()
-
-    # determine the indices of the anti-operators
-    self.determine_index_of_anti_operators()
-
-
-  def create_operators(self):
-
-    # example with +Z
-    #self.M.append(np.matrix([[1.0, 2.0], [2.0+2.0j, 3.0]]))
-    # watch out: python needs 1.0 instead of just 1 to assume float variables
-    #self.name.append('asd');
-    #self.mutation_partners.append(['+z','+w']);
-    #self.anti_operator.append('+w');
-
-    # Operators
-    alpha = math.pi / 3.0;
-    da = math.pi / 10.0;
-
-    # operator 1 +z
-    self.M.append(np.matrix(
-      [[math.cos(da / 2.0) - 1j * math.sin(da / 2.0), 0.0],
-       [0.0, math.cos(da / 2.0) + 1j * math.sin(da / 2.0)]]))
-    self.name.append('+z');
-    self.mutation_partners.append(['-z', '+w', '-w']);
-    self.anti_operator.append('-z');
-
-    # operator 2 -z
-    self.M.append(np.matrix(
-      [[math.cos(-da / 2.0) - 1j * math.sin(-da / 2.0), 0.0],
-       [0.0, math.cos(-da / 2.0) + 1j * math.sin(-da / 2.0)]]))
-    self.name.append('-z');
-    self.mutation_partners.append(['+z', '+w', '-w']);
-    self.anti_operator.append('+z');
-
-    # operator 3 +w
-    self.M.append(np.matrix([
-      [math.cos(da / 2.0) - 1j * math.cos(alpha) * math.sin(da / 2.0),
-       -math.sin(alpha) * math.sin(da / 2.0)],
-      [math.sin(alpha) * math.sin(da / 2.0),
-       math.cos(da / 2.0) + 1j * math.cos(alpha) * math.sin(da / 2.0)]]))
-    self.name.append('+w');
-    self.mutation_partners.append(['+z', '-z', '-w']);
-    self.anti_operator.append('-w');
-
-    # operator 4 -w
-    self.M.append(np.matrix([
-      [math.cos(-da / 2.0) - 1j * math.cos(alpha) * math.sin(-da / 2.0),
-       -math.sin(alpha) * math.sin(-da / 2.0)],
-      [math.sin(alpha) * math.sin(-da / 2.0),
-       math.cos(-da / 2.0) + 1j * math.cos(alpha) * math.sin(-da / 2.0)]]))
-    self.name.append('-w');
-    self.mutation_partners.append(['+z', '-z', '+w']);
-    self.anti_operator.append('+w');
-
-
-  def check_unitarity(self):
-    # this function checks that all defined operators are unitary and
-    # stops the program if one is not: for a 2x2 unitary U we have
-    # U * U^H = I, so trace(U * U^H) == 2; compare with a tolerance
-    # because the matrix entries are floating point
-    for k in range(len(self.M)):
-      if abs(np.trace(self.M[k] * self.M[k].getH()) - 2) > 1e-9:
-        print "Operator " + self.name[k] + " (no. " + str(
-          k) + ") isn't unitary!"
-        exit()
-
-  def determine_index_of_mutation_partners(self):
-    # create a field for each operator with an array of possible other gates for the mutation step
-    for k in range(len(self.M)):
-      hlp = []
-      for m in range(len(self.mutation_partners[k])):
-        # go through all possible partners and find them among the operators
-        for n in range(len(self.M)):
-          if self.mutation_partners[k][m] == self.name[n]:
-            hlp.append(n)
-      self.mutation_partners_no.append(hlp)
-
-  def determine_index_of_anti_operators(self):
-    # determine the Anti operator index
-    for k in range(len(self.M)):
-      found_operator = False
-      for n in range(len(self.M)):
-        # go through all possible partners and find them among the operators
-        if self.anti_operator[k] == self.name[n]:
-          self.anti_operator_no.append(n);
-          found_operator = True
-
-      if not found_operator:
-        print "Couldn't find the anti-operator for operator " + self.name[
-          k] + " (no " + str(k) + ")"
-
-  def __str__(self):
-    # just a test to play around
-    hlpstr = ''
-    for k in range(len(self.M)):
-      hlpstr = hlpstr + self.name[k] + " " + str(
-        self.anti_operator_no[k]) + "\n"
-
-    return "Operator Class:\n" + hlpstr
-
-
-def calc_fidelity(sequence, Op, Ugoal):
-  # Op is an Op instance whose field M holds the operator matrices
-  # Ugoal is a 2x2 unitary matrix
-  # sequence is a list of operator indices, e.g. [1, 2, 3, 4]
-  # returns the fidelity |trace(Ugoal * Uapprox^H)| / dim
-
-  # example:
-  # sequence = [1 4 2 4 5];
-  # Uapprox = Op(1) * Op(4) * Op(2) * Op(4) * Op(5);
-
-  # create identity matrix
-  Uapprox = np.eye(len(Ugoal))
-
-  for k in range(len(sequence)):
-    Uapprox = Op.M[sequence[k]] * Uapprox
-
-  # M.getH() returns the complex conjugate of self
-  result = (1.0 / len(Ugoal)) * abs(np.trace(Ugoal * Uapprox.getH()))
-
-  return result
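-# minimal usage sketch (hypothetical index sequence into Op.M):
-#   op = Op()
-#   f = calc_fidelity([0, 2, 1], op, np.eye(2))  # fidelity in [0, 1]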
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/input_generator.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/input_generator.py
deleted file mode 100644
index 009af836f435d013050ff877c4cd66d86019edfc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/input_generator.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import numpy as np
-import math
-import random
-
-
-def generate_random_Ugoal_HARD(N, **kwargs):
-  # N is the length of random matrix multiplication yielding Ugoal
-  # N ~ 100 should be enough
-  # This method is hard because it creates Ugoal over the whole space
-  # Ugoal 2x2 unitary matrix
-
-  # create identity matrix
-  Ugoal = np.eye(2)
-
-  # create all N random angles in 2*pi*[0,1)
-  seq_angle = 2.0 * math.pi * np.random.rand(1, N)
-
-  # determine random operator
-  help2 = np.random.randint(3, size=(1, N))
-
-  for k in range(N):
-    hlp = seq_angle[0][k];
-    if help2[0][k] == 0:
-      Ugoal = X_Mat(hlp) * Ugoal
-    elif help2[0][k] == 1:
-      Ugoal = Y_Mat(hlp) * Ugoal
-    else:
-      Ugoal = Z_Mat(hlp) * Ugoal
-
-  return Ugoal
-
-
-def generate_random_Ugoal_EASY(N, alpha):
-  # N is the length of random matrix multiplication yielding Ugoal
-  # N ~ 100 should be enough
-  # alpha is the used angle between rotation axes
-  # This method is easier because it builds Ugoal from only two rotation axes, Z and W(alpha)
-  # Ugoal 2x2 unitary matrix
-
-  # create identity matrix
-  Ugoal = np.eye(2)
-
-  # create all N random angles in 2*pi*[0,1)
-  seq_angle = 2.0 * math.pi * np.random.rand(1, N)
-
-  # determine random operator
-  help2 = np.random.randint(2, size=(1, N))
-
-  for k in range(N):
-    hlp = seq_angle[0][k];
-    if help2[0][k] == 0:
-      Ugoal = Z_Mat(hlp) * Ugoal
-    else:
-      Ugoal = W_Mat(hlp, alpha) * Ugoal
-
-  return Ugoal
-
-
-def generate_random_Ugoal_RANDOM(**kwargs):
-  # Random guess with the following parametrization for U
-  # U = @(q1, q2, q3) [
-  #				[ cos(q1)*exp( i*q2 ), sin(q1)*exp( i*q3 )];
-  #                [-sin(q1)*exp(-i*q3 ), cos(q1)*exp(-i*q2 )]
-  #                    ];
-
-  # create random angles
-  q1 = random.uniform(0.0, 0.5 * math.pi)
-  q2 = random.uniform(0.0, 2.0 * math.pi)
-  q3 = random.uniform(0.0, 2.0 * math.pi)
-
-  return np.matrix([
-    [math.cos(q1) * my_cexp(q2), math.sin(q1) * my_cexp(q3)],
-    [-math.sin(q1) * my_cexp(-q3), math.cos(q1) * my_cexp(-q2)]])
-
-
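-# my_cexp implements Euler's formula: exp(i*x) = cos(x) + i*sin(x)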
-def my_cexp(x):
-  return math.cos(x) + 1j * math.sin(x)
-
-
-def X_Mat(a):
-  return np.matrix([[math.cos(a / 2.0), -1j * math.sin(a / 2.0)],
-                    [-1j * math.sin(a / 2.0), math.cos(a / 2.0)]])
-
-
-def Y_Mat(a):
-  return np.matrix([[math.cos(a / 2.0), -math.sin(a / 2.0)],
-                    [math.sin(a / 2.0), math.cos(a / 2.0)]])
-
-
-def Z_Mat(a):
-  return np.matrix([[math.cos(-a / 2.0) + 1j * math.sin(-a / 2.0), 0],
-                    [0, math.cos(a / 2.0) + 1j * math.sin(a / 2.0)]])
-
-
-def W_Mat(a, alpha):
-  return np.matrix([[math.cos(a / 2) - 1j * math.cos(alpha) * math.sin(a / 2.0),
-                     -math.sin(a / 2.0) * math.sin(alpha)],
-                    [math.sin(a / 2.0) * math.sin(alpha),
-                     math.cos(a / 2.0) + 1j * math.cos(alpha) * math.sin(
-                       a / 2.0)]])
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/problem_description.pdf b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/problem_description.pdf
deleted file mode 100644
index e8d09de95a8a6416bf88f10a4d6e4a0fca92670d..0000000000000000000000000000000000000000
Binary files a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/problem_description.pdf and /dev/null differ
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/testwrapper.sh b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/testwrapper.sh
deleted file mode 100755
index e08593a8764b81a8e68380f9d46753c7a73859c0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/testwrapper.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-COUNT=50
-for Z in `seq $COUNT`
-do
-  for T in `./unitary.py --list-techniques "$@"`;
-  do
-    echo $Z/$COUNT $T
-    ./unitary.py --technique=$T "$@"
-  done
-done
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/unitary.py b/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/unitary.py
deleted file mode 100755
index cfa5fe114155f9a7efbd25d191d520846e3d4017..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/examples/unitary/unitary.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-#
-# This is a quantum control example motivated by the experimental need
-# to synthesize unitary matrices in SU(2) in optimal time, given an
-# explicit and finite control set generating the whole space, and an
-# admissible error.
-#
-# See problem_description.pdf for additional details.
-#
-# Contributed by Clarice D. Aiello <clarice@mit.edu>
-#
-
-import adddeps  # fix sys.path
-
-import argparse
-import logging
-import math
-import random
-import sys
-
-try:
-  import numpy as np
-except:
-  print >> sys.stderr, '''
-
-ERROR: import numpy failed, please install numpy
-
-Possible things to try:
-  ../../venv/bin/pip install numpy
-  ../../venv/bin/easy_install numpy
-  sudo apt-get install python-numpy
-
-'''
-  raise
-
-import opentuner
-
-from math import sqrt
-import cla_func
-from input_generator import (generate_random_Ugoal_HARD,
-                             generate_random_Ugoal_EASY,
-                             generate_random_Ugoal_RANDOM)
-
-from opentuner.search.manipulator import (ConfigurationManipulator,
-                                          SwitchParameter,
-                                          IntegerParameter,
-                                          FloatParameter)
-
-
-def generate_random_Ugoal_FIXED(**kwargs):
-  Ag = -1 / sqrt(10);
-  Bg = sqrt(2) / sqrt(10);
-  Cg = -sqrt(3) / sqrt(10);
-  Dg = -sqrt(4) / sqrt(10);
-  return cla_func.np.matrix(
-    [[Ag + Cg * 1j, Bg + Dg * 1j], [-Bg + Dg * 1j, Ag - Cg * 1j]])
-
-
-log = logging.getLogger(__name__)
-
-generators = {
-  'hard': generate_random_Ugoal_HARD,
-  'easy': generate_random_Ugoal_EASY,
-  'random': generate_random_Ugoal_RANDOM,
-  'fixed': generate_random_Ugoal_FIXED,
-}
-
-parser = argparse.ArgumentParser(parents=opentuner.argparsers())
-parser.add_argument('--seq-len', type=int, default=10,
-                    help='maximum length for generated sequence')
-parser.add_argument('--goal-type', choices=generators.keys(), default='hard',
-                    help='method used to generate goal')
-parser.add_argument('--goal-n', type=int, default=100,
-                    help='argument to ugoal generator')
-parser.add_argument('--goal-alpha', type=float,
-                    default=random.random() * math.pi,
-                    help='argument to ugoal generator')
-
-
-class Unitary(opentuner.measurement.MeasurementInterface):
-  def __init__(self, *pargs, **kwargs):
-    super(Unitary, self).__init__(*pargs, **kwargs)
-
-    self.op = cla_func.Op()
-    self.num_operators = len(self.op.M)
-    self.Ugoal = generators[self.args.goal_type](N=self.args.goal_n,
-                                                 alpha=self.args.goal_alpha)
-
-
-  def run(self, desired_result, input, limit):
-    cfg = desired_result.configuration.data
-
-    sequence = [cfg[i] for i in xrange(self.args.seq_len)
-                if cfg[i] < self.num_operators]
-    # sequence can be shorter than self.args.seq_len with null operator
-
-    if len(sequence) > 0:
-      accuracy = cla_func.calc_fidelity(sequence, self.op, self.Ugoal)
-      # ~.99 is acceptable
-    else:
-      accuracy = 0.0
-
-    return opentuner.resultsdb.models.Result(time=0.0,
-                                             accuracy=accuracy,
-                                             size=len(sequence))
-
-  def manipulator(self):
-    manipulator = ConfigurationManipulator()
-    for d in xrange(self.args.seq_len):
-      # we add 1 to num_operators to allow an ignored 'null' operator
-      manipulator.add_parameter(SwitchParameter(d, self.num_operators + 1))
-    return manipulator
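-
-  # Encoding sketch: with num_operators == 3, each position holds a value
-  # in {0, 1, 2, 3}; the extra value 3 (== num_operators) is the 'null'
-  # operator filtered out in run(), which is how sequences shorter than
-  # seq_len are represented.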
-
-  def save_final_config(self, configuration):
-    '''
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    '''
-    cfg = configuration.data
-    sequence = [cfg[i] for i in xrange(self.args.seq_len)
-                if cfg[i] < self.num_operators]
-    print "Final sequence", sequence
-
-  def objective(self):
-    # we could have also chosen to store 1.0 - accuracy in the time field
-    # and use the default MinimizeTime() objective
-    return opentuner.search.objective.MaximizeAccuracyMinimizeSize()
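-
-  # Sketch of the alternative noted above: run() would instead return
-  #   Result(time=1.0 - accuracy, size=len(sequence))
-  # and this method would return opentuner.search.objective.MinimizeTime().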
-
-
-if __name__ == '__main__':
-  args = parser.parse_args()
-  Unitary.main(args)
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/gen-venv-bootstrap.py b/llvm/projects/hpvm-tensor-rt/opentuner/gen-venv-bootstrap.py
deleted file mode 100755
index ff159bb1080e7f3f0979e4b60f4d41eea5c9d1e9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/gen-venv-bootstrap.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!./venv/bin/python
-
-extra = '''
-
-default_target_dir = 'venv'
-
-pip_install_packages = [l.strip() for l in open('requirements.txt') if l.strip()]
-
-import os
-import subprocess
-import sys
-
-def adjust_options(options, args):
-  if len(args)==0:
-    os.chdir(os.path.dirname(__file__))
-    args.append(default_target_dir)
-
-def after_install(options, home_dir):
-  from os.path import join
-  pip = join(home_dir, 'bin/pip')
-  if not os.path.exists(pip):
-    # on windows
-    pip = join(home_dir, 'Scripts/pip.exe')
-  if not os.path.exists(pip):
-    print "error", pip, "is missing"
-  if sys.version_info < (2, 7):
-    subprocess.call([pip, 'install', 'importlib'])
-  for prog in pip_install_packages:
-    subprocess.call([pip, 'install', prog])
-
-'''
-
-import os
-import virtualenv
-
-os.chdir(os.path.dirname(__file__))
-output = virtualenv.create_bootstrap_script(extra)
-with open('venv-bootstrap.py', 'w') as f:
-  f.write(output)
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/install_reqs.sh b/llvm/projects/hpvm-tensor-rt/opentuner/install_reqs.sh
deleted file mode 100644
index e671a5f2a1619f7960fa7471774aa94cab3e0bd6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/install_reqs.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-pip2 install sqlalchemy
-pip2 install psutil
-pip2 install opentuner
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.gnuplot b/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.gnuplot
deleted file mode 100644
index 1d4f13021303b0df3c2821eac3935524f494e18f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.gnuplot
+++ /dev/null
@@ -1,10 +0,0 @@
-
-set terminal x11
-set xlabel "Autotuning Seconds"
-set ylabel "Runtime Seconds"
-set xrange [0:600]
-
-plot "/tmp/livedisplay.dat" u 1:2 w lp lw 3 title "Best Execution Time", \
-     "/tmp/livedisplaydetails.dat" w p lw 2 title "Tests (excluding timeouts)"
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.py b/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.py
deleted file mode 100755
index 5aa3d552d8e5506236d9e004c1f66370b7f19a23..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/misc/livedisplay.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-import os
-import argparse
-import subprocess
-import time
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--gnuplot-filename', default='livedisplay.gnuplot')
-parser.add_argument('--data', default='/tmp/livedisplay.dat')
-parser.add_argument('--details', default='/tmp/livedisplaydetails.dat')
-parser.add_argument('--xrange', type=float, default=300)
-parser.add_argument('--yrange', type=float, default=.05)
-parser.add_argument('--yrange2', type=float, default=1.0)
-parser.add_argument('--remote')
-args = parser.parse_args()
-
-if args.remote:
-  if os.path.exists(args.data):
-    os.unlink(args.data)
-  if os.path.exists(args.details):
-    os.unlink(args.details)
-  syncproc = subprocess.Popen(
-      ["ssh", args.remote, "tail -f -n10000 " + args.data],
-      stdout=open(args.data, "w"))
-  syncproc2 = subprocess.Popen(
-      ["ssh", args.remote, "tail -f -n10000 " + args.details],
-      stdout=open(args.details, "w"))
-
-while '\n' not in open(args.data).read():
-  time.sleep(1)
-while '\n' not in open(args.details).read():
-  time.sleep(1)
-
-p1 = subprocess.Popen(["gnuplot"], stdin=subprocess.PIPE)
-p1.stdin.write(open(args.gnuplot_filename).read())
-print >> p1.stdin, 'set title "Zoomed out"'
-print >> p1.stdin, "set xrange [0:%f]" % args.xrange
-print >> p1.stdin, "set yrange [0:%f]" % args.yrange2
-p1.stdin.flush()
-
-time.sleep(1)
-
-p2 = subprocess.Popen(["gnuplot"], stdin=subprocess.PIPE)
-p2.stdin.write(open(args.gnuplot_filename).read())
-print >> p2.stdin, 'set title "Zoomed in"'
-print >> p2.stdin, "set xrange [0:%f]" % args.xrange
-print >> p2.stdin, "set yrange [0:%f]" % args.yrange
-p2.stdin.flush()
-
-procs = [p1, p2]
-
-while True:
-  time.sleep(1)
-  for p in procs:
-    print >> p.stdin, "replot"
-    p.stdin.flush()
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/__init__.py
deleted file mode 100644
index 09a5dead02d214f4dce641069d7be66c124f278a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/__init__.py
+++ /dev/null
@@ -1,41 +0,0 @@
-
-import measurement
-import resultsdb
-import search
-import tuningrunmain
-from opentuner.measurement import MeasurementInterface
-from opentuner.resultsdb.models import Configuration
-from opentuner.resultsdb.models import DesiredResult
-from opentuner.resultsdb.models import Result
-from opentuner.resultsdb.models import TuningRun
-from opentuner.search.manipulator import ConfigurationManipulator
-from opentuner.search.manipulator import EnumParameter
-from opentuner.search.manipulator import FloatParameter
-from opentuner.search.manipulator import IntegerParameter
-from opentuner.search.manipulator import LogFloatParameter
-from opentuner.search.manipulator import LogIntegerParameter
-from opentuner.search.manipulator import PermutationParameter
-from opentuner.search.manipulator import ScheduleParameter
-from opentuner.search.manipulator import SwitchParameter
-from opentuner.tuningrunmain import init_logging
-
-
-def argparsers():
-  """
-  return a list of ArgumentParsers to be used as parents of the user's parser
-  """
-  return [
-      measurement.driver.argparser,
-      measurement.interface.argparser,
-      search.driver.argparser,
-      search.plugin.argparser,
-      search.technique.argparser,
-      #stats.argparser,
-      tuningrunmain.argparser,
-    ]
-
-
-def default_argparser():
-  import argparse
-  return argparse.ArgumentParser(parents=argparsers())
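-
-# Minimal usage sketch (MyInterface is a hypothetical MeasurementInterface
-# subclass):
-#   args = default_argparser().parse_args()
-#   MyInterface.main(args)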
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/api.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/api.py
deleted file mode 100644
index 19a2f60935d7a700771778f0a1304f5ff5cbea6d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/api.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from datetime import datetime
-from opentuner import tuningrunmain
-
-
-class TuningRunManager(tuningrunmain.TuningRunMain):
-  """
-  This class manages a tuning run in a "slave" configuration, where main()
-  is controlled by another program.
-  """
-  def __init__(self, measurement_interface, args, **kwargs):
-    super(TuningRunManager, self).__init__(measurement_interface, args, **kwargs)
-    self.init()
-    self.tuning_run.state = 'RUNNING'
-    self.commit(force=True)
-    self.search_driver.external_main_begin()
-
-  def get_next_desired_result(self):
-    """
-    Returns an opentuner.resultsdb.DesiredResult that should be tested next.
-    """
-    dr = self.measurement_driver.query_pending_desired_results().first()
-    if dr is None:
-      self.search_driver.external_main_generation()
-      dr = self.measurement_driver.query_pending_desired_results().first()
-      if dr is None:
-        return None
-    self.measurement_driver.claim_desired_result(dr)
-    dr.limit = self.measurement_driver.run_time_limit(dr)
-    return dr
-
-  def get_desired_results(self):
-    """
-    Returns a list of all opentuner.resultsdb.DesiredResult that should be tested next.
-    """
-    drs = self.measurement_driver.query_pending_desired_results().all()
-    if len(drs) == 0:
-      self.search_driver.external_main_generation()
-      drs = self.measurement_driver.query_pending_desired_results().all()
-      if len(drs) == 0:
-        return []
-    for dr in drs:
-      self.measurement_driver.claim_desired_result(dr)
-      dr.limit = self.measurement_driver.run_time_limit(dr)
-
-    return drs
-
-  def report_result(self, desired_result, result, result_input=None):
-    """
-    Report a measured result.  desired_result should have been returned by
-    get_next_desired_result().
-    """
-    self.measurement_driver.report_result(desired_result, result, result_input)
-
-  def get_best_configuration(self):
-    """
-    The best configuration found so far.  From the current tuning run only.
-    """
-    try:
-      return self.search_driver.best_result.configuration.data
-    except AttributeError:
-      return None
-
-  def get_best_result(self):
-    """
-    The best result found so far.  From the current tuning run only.
-    """
-    try:
-      return self.search_driver.best_result
-    except AttributeError:
-      return None
-
-  def finish(self):
-    """
-    Called at the end of the tuning process to call hooks and close database
-    connections.
-    """
-    self.search_driver.external_main_end()
-    self.measurement_interface.save_final_config(
-        self.search_driver.best_result.configuration)
-    self.tuning_run.final_config = self.search_driver.best_result.configuration
-    self.tuning_run.state = 'COMPLETE'
-    self.tuning_run.end_date = datetime.now()
-    self.commit(force=True)
-    self.session.close()
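-
-# Minimal driver-loop sketch for this API (MyInterface and run_config are
-# hypothetical names; Result is opentuner.resultsdb.models.Result):
-#
-#   api = TuningRunManager(MyInterface(args), args)
-#   for _ in range(budget):
-#     dr = api.get_next_desired_result()
-#     if dr is None:
-#       continue
-#     api.report_result(dr, Result(time=run_config(dr.configuration.data)))
-#   print api.get_best_configuration()
-#   api.finish()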
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/driverbase.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/driverbase.py
deleted file mode 100644
index 5486889c0dcedd4342a9cb463aa0d5047f3c0932..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/driverbase.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from opentuner.resultsdb.models import *
-
-
-class DriverBase(object):
-  """
-  shared base class between MeasurementDriver and SearchDriver
-  """
-
-  def __init__(self,
-               session,
-               tuning_run,
-               objective,
-               tuning_run_main,
-               args,
-               **kwargs):
-    self.args = args
-    self.objective = objective
-    self.session = session
-    self.tuning_run_main = tuning_run_main
-    self.tuning_run = tuning_run
-    self.program = tuning_run.program
-
-  def results_query(self,
-                    generation=None,
-                    objective_ordered=False,
-                    config=None):
-    q = self.session.query(Result)
-    q = q.filter_by(tuning_run=self.tuning_run)
-
-    if config:
-      q = q.filter_by(configuration=config)
-
-    if generation is not None:
-      subq = (self.session.query(DesiredResult.result_id)
-              .filter_by(tuning_run=self.tuning_run,
-                         generation=generation))
-      q = q.filter(Result.id.in_(subq.subquery()))
-
-    if objective_ordered:
-      q = self.objective.result_order_by(q)
-
-    return q
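-
-  # Usage sketch: the best result so far (as used by
-  # MeasurementDriver.run_time_limit) is
-  #   self.results_query(objective_ordered=True).first()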
-
-  def requests_query(self):
-    q = self.session.query(DesiredResult).filter_by(tuning_run=self.tuning_run)
-    return q
-    
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/#interface.py# b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/#interface.py#
deleted file mode 100644
index 4fe23da5d904183fa4d3c340a74e89918052823e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/#interface.py#
+++ /dev/null
@@ -1,359 +0,0 @@
-
-import abc
-import argparse
-import errno
-import hashlib
-import logging
-import os
-import re
-import signal
-import subprocess
-import threading
-import time
-from multiprocessing.pool import ThreadPool
-
-try:
-  import resource
-except ImportError:
-  resource = None
-
-try:
-  import fcntl
-except ImportError:
-  fcntl = None
-
-import opentuner
-from opentuner import resultsdb
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--parallel-compile', action='store_true',
-                       default=False,
-                       help="present if compiling can be done in parallel")
-
-the_io_thread_pool = None
-
-
-class MeasurementInterface(object):
-  """
-  abstract base class for compile and measurement
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def __init__(self,
-               args=None,
-               project_name=None,
-               program_name='unknown',
-               program_version='unknown',
-               manipulator=None,
-               objective=None,
-               input_manager=None):
-    self.args = args
-    self._project = project_name
-    self._program = program_name
-    self._version = program_version
-    self._objective = objective
-    self._manipulator = manipulator
-    self._input_manager = input_manager
-
-    self.pids = []
-    self.pid_lock = threading.Lock()
-    self.parallel_compile = args.parallel_compile
-    # If parallel_compile is False then compile_and_run() will be invoked
-    # sequentially otherwise the driver first invokes compile() in parallel
-    # followed by run_precompiled() sequentially
-
-  def compile(self, config_data, id):
-    """
-    Compile in PARALLEL according to the configuration in config_data
-    (obtained from desired_result.configuration). Should use the id
-    parameter to determine the output location of the executable. The
-    return value will be passed to run_precompiled() as compile_result,
-    which is useful for storing error/timeout information.
-    """
-    if self.parallel_compile:
-        raise RuntimeError('MeasurementInterface.compile() not implemented '
-                'for parallel compilation')
-    pass
-
-  def run_precompiled(self, desired_result, input, limit, compile_result, id):
-    """
-    Run the given desired_result SEQUENTIALLY on input and produce a
-    Result(). Abort early if limit (in seconds) is reached. Assumes the
-    executable to be measured has already been compiled by compile() to an
-    executable corresponding to identifier id. The compile_result is the
-    return value of compile(), and it will be None if compile() was not
-    called.
-    """
-    if self.parallel_compile:
-        raise RuntimeError('MeasurementInterface.run_precompiled() not '
-                'implemented for parallel compilation')
-    pass
-
-  def cleanup(self, id):
-    """
-    Clean up any temporary files associated with the executable
-    """
-    pass
-
-  def pre_process(self):
-    """
-    Hook run before each iteration. This method will be called once per
-    iteration, before all threads are launched.
-    """
-    pass
-
-  def post_process(self):
-    """
-    Hook run after each iteration. This method will be called once per
-    iteration, after all threads are committed.
-    """
-    pass
-
-  def extra_convergence_criteria(self, result):
-    """
-    The extra convergence criteria which returns True if the
-    current result is acceptable by the user
-    """
-    return False
-
-  #@abc.abstractmethod
-  def compile_and_run(self, desired_result, input, limit):
-    """
-    Compile and run the given desired_result on input and produce a
-    Result(); abort early if limit (in seconds) is reached. This function
-    is only used in the sequential execution flow.
-
-    FIXME: Should uncomment @abc.abstractmethod; commented out for now for
-    compatibility
-    """
-    return self.run(desired_result, input, limit)
-
-  def run(self, desired_result, input, limit):
-    """
-    run the given desired_result on input and produce a Result(),
-    abort early if limit (in seconds) is reached
-    """
-    return opentuner.resultsdb.models.Result()
-
-  def save_final_config(self, config):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    try:
-      config_str = repr(config.data)
-      if len(config_str) > 256:
-        config_str = config_str[:256] + '...'
-      log.info('final configuration: %s', config_str)
-      log.info('you may want to implement save_final_config(), to store this')
-    except:
-      log.error('error printing configuration', exc_info=True)
-
-  def db_program_version(self, session):
-    """return a version identifier for the program being tuned"""
-    return resultsdb.models.ProgramVersion.get(
-        session=session,
-        project=self.project_name(),
-        name=self.program_name(),
-        version=self.program_version(),
-        parameter_info=self.manipulator().parameters_to_json(),
-    )
-
-  def set_driver(self, measurement_driver):
-    self.driver = measurement_driver
-
-  def project_name(self):
-    if self._project is not None:
-      return self._project
-    autoname = re.sub('(Measurement)?Interface$', '', self.__class__.__name__)
-    if autoname:
-      return autoname
-    else:
-      return 'unknown'
-
-  def program_name(self):
-    return self._program
-
-  def program_version(self):
-    return self._version
-
-  def file_hash(self, filename):
-    """helper used to generate program versions"""
-    return hashlib.sha256(open(filename).read()).hexdigest()
-
-  def manipulator(self):
-    """
-    called once to create the search.manipulator.ConfigurationManipulator
-    """
-    if self._manipulator is None:
-      msg = ('MeasurementInterface.manipulator() must be implemented or a '
-             '"manipulator=..." must be provided to the constructor')
-      log.error(msg)
-      raise Exception(msg)
-    return self._manipulator
-
-  def objective(self):
-    """
-    called once to create the search.objective.SearchObjective
-    """
-    if self._objective is None:
-      from ..search.objective import MinimizeTime
-
-      return MinimizeTime()
-    return self._objective
-
-  def input_manager(self):
-    """
-    called once to create the measurement.inputmanager.InputManager
-    """
-    if self._input_manager is None:
-      from .inputmanager import FixedInputManager
-
-      return FixedInputManager()
-    return self._input_manager
-
-  def seed_configurations(self):
-    """
-    Extra seed configuration objects to add to those given on the command line.
-    Configuration objects (typically dictionaries) not database objects.
-    """
-    return []
-
-  def kill_all(self):
-    self.pid_lock.acquire()
-    for pid in self.pids:
-      goodkillpg(pid)
-    self.pids = []
-    self.pid_lock.release()
-
-  def call_program(self, cmd, limit=None, memory_limit=None, **kwargs):
-    """
-    call cmd and kill it if it runs for longer than limit
-
-    returns dictionary like
-      {'returncode': 0,
-       'stdout': '', 'stderr': '',
-       'timeout': False, 'time': 1.89}
-    """
-    the_io_thread_pool_init(self.args.parallelism)
-    if limit == float('inf'):
-      limit = None
-    if type(cmd) in (str, unicode):
-      kwargs['shell'] = True
-    killed = False
-    t0 = time.time()
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                         preexec_fn=preexec_setpgid_setrlimit(memory_limit),
-                         **kwargs)
-    # Add p.pid to list of processes to kill in case of keyboardinterrupt
-    self.pid_lock.acquire()
-    self.pids.append(p.pid)
-    self.pid_lock.release()
-
-    try:
-      stdout_result = the_io_thread_pool.apply_async(p.stdout.read)
-      stderr_result = the_io_thread_pool.apply_async(p.stderr.read)
-      while p.returncode is None:
-        if limit is None:
-          goodwait(p)
-        elif limit and time.time() > t0 + limit:
-          killed = True
-          goodkillpg(p.pid)
-          goodwait(p)
-        else:
-          # still waiting...
-          sleep_for = limit - (time.time() - t0)
-          if not stdout_result.ready():
-            stdout_result.wait(sleep_for)
-          elif not stderr_result.ready():
-            stderr_result.wait(sleep_for)
-          else:
-            #TODO(jansel): replace this with a portable waitpid
-            time.sleep(0.001)
-        p.poll()
-    except:
-      if p.returncode is None:
-        goodkillpg(p.pid)
-      raise
-    finally:
-      # No longer need to kill p
-      self.pid_lock.acquire()
-      if p.pid in self.pids:
-        self.pids.remove(p.pid)
-      self.pid_lock.release()
-
-    t1 = time.time()
-    return {'time': float('inf') if killed else (t1 - t0),
-            'timeout': killed,
-            'returncode': p.returncode,
-            'stdout': stdout_result.get(),
-            'stderr': stderr_result.get()}
-
-  def prefix_hook(self, session):
-    pass
-
-  @classmethod
-  def main(cls, args, *pargs, **kwargs):
-    from opentuner.tuningrunmain import TuningRunMain
-
-    return TuningRunMain(cls(args, *pargs, **kwargs), args).main()
-
-
-class DefaultMeasurementInterface(MeasurementInterface):
-  def run(self, desired_result, input, limit):
-    raise RuntimeError('MeasurementInterface.run() not implemented')
-
-
-def preexec_setpgid_setrlimit(memory_limit):
-  if resource is not None:
-    def _preexec():
-      os.setpgid(0, 0)
-      try:
-        resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
-      except ValueError:
-        pass  # No permission
-      if memory_limit:
-        try:
-          (soft, hard) = resource.getrlimit(resource.RLIMIT_AS)
-          resource.setrlimit(resource.RLIMIT_AS, (min(soft, memory_limit),
-                                                  min(hard, memory_limit)))
-        except ValueError:
-          pass  # No permission
-    return _preexec
-
-
-def the_io_thread_pool_init(parallelism=1):
-  global the_io_thread_pool
-  if the_io_thread_pool is None:
-    the_io_thread_pool = ThreadPool(2 * parallelism)
-    # make sure the threads are started up
-    the_io_thread_pool.map(int, range(2 * parallelism))
-
-
-def goodkillpg(pid):
-  """
-  wrapper around kill to catch errors
-  """
-  log.debug("killing pid %d", pid)
-  try:
-    if hasattr(os, 'killpg'):
-      os.killpg(pid, signal.SIGKILL)
-    else:
-      os.kill(pid, signal.SIGKILL)
-  except:
-    log.error('error killing process %s', pid, exc_info=True)
-
-
-def goodwait(p):
-  """
-  Python doesn't retry system calls that fail with EINTR, so retry manually
-  """
-  while True:
-    try:
-      rv = p.wait()
-      return rv
-    except OSError, e:
-      if e.errno != errno.EINTR:
-        raise
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/.#interface.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/.#interface.py
deleted file mode 120000
index 68c682013089268d9e8f3e50ca41da1228c544c5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/.#interface.py
+++ /dev/null
@@ -1 +0,0 @@
-hashim@hashim-VirtualBox.2708:1511328915
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/__init__.py
deleted file mode 100644
index c289e8d6f5081d846ef431f36649b6e976df1a82..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-
-import driver
-import interface
-from interface import MeasurementInterface
-from driver import MeasurementDriver
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/driver.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/driver.py
deleted file mode 100644
index d00886920a95e2b7c61ca41b6aea0a89247ab8c9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/driver.py
+++ /dev/null
@@ -1,271 +0,0 @@
-import argparse
-import logging
-import time
-import socket
-import os
-from multiprocessing.pool import ThreadPool
-from datetime import datetime
-
-from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm.exc import NoResultFound
-
-from opentuner.driverbase import DriverBase
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--machine-class',
-                       help="name of the machine class being run on")
-
-
-class MeasurementDriver(DriverBase):
-  """
-  manages the measurement process, reading DesiredResults and creating Results
-  """
-
-  def __init__(self,
-               measurement_interface,
-               input_manager,
-               **kwargs):
-    super(MeasurementDriver, self).__init__(**kwargs)
-
-    if not self.args.machine_class:
-      self.args.machine_class = 'default'
-
-    self.interface = measurement_interface
-    self.input_manager = input_manager
-    self.commit = self.tuning_run_main.commit
-    self.upper_limit_multiplier = 10.0
-    self.default_limit_multiplier = 2.0
-
-    self.laptime = time.time()
-    self.machine = self.get_machine()
-
-  def get_machine(self):
-    """
-    get (or create) the machine we are currently running on
-    """
-    hostname = socket.gethostname()
-    try:
-      self.session.flush()
-      return self.session.query(Machine).filter_by(name=hostname).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-      m = Machine(name=hostname,
-                  cpu=_cputype(),
-                  cores=_cpucount(),
-                  memory_gb=_memorysize() / (
-                  1024.0 ** 3) if _memorysize() else 0,
-                  machine_class=self.get_machine_class())
-      self.session.add(m)
-      return m
-
-  def get_machine_class(self):
-    """
-    get (or create) the machine class we are currently running on
-    """
-    return MachineClass.get(self.session, name=self.args.machine_class)
-
-  def run_time_limit(self, desired_result, default=3600.0 * 24 * 365 * 10):
-    """return a time limit to apply to a test run (in seconds)"""
-    best = self.results_query(objective_ordered=True).first()
-    if best is None:
-      if desired_result.limit:
-        return desired_result.limit
-      else:
-        return default
-
-    if desired_result.limit:
-      return min(desired_result.limit, self.upper_limit_multiplier * best.time)
-    else:
-      return self.default_limit_multiplier * best.time
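-
-  # Worked example: with best.time == 2.0s and no requested limit, new
-  # tests are capped at 2.0 * 2.0 = 4.0s; with a requested limit of 30s,
-  # at min(30, 10.0 * 2.0) = 20.0s.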
-
-  def report_result(self, desired_result, result, input=None):
-    result.configuration = desired_result.configuration
-    result.input = input
-    result.machine = self.machine
-    result.tuning_run = self.tuning_run
-    result.collection_date = datetime.now()
-    self.session.add(result)
-    desired_result.result = result
-    desired_result.state = 'COMPLETE'
-    self.input_manager.after_run(desired_result, input)
-    result.collection_cost = self.lap_timer()
-    self.session.flush()  # populate result.id
-    log.debug(
-        'Result(id=%d, cfg=%d, time=%.4f, accuracy=%.2f, collection_cost=%.2f)',
-        result.id,
-        result.configuration.id,
-        result.time,
-        result.accuracy if result.accuracy is not None else float('NaN'),
-        result.collection_cost)
-    self.commit()
-
-  def run_desired_result(self, desired_result, compile_result=None,
-                         exec_id=None):
-    """
-    create a new Result using the input manager and measurement interface.
-    The optional compile_result parameter can be passed to run_precompiled()
-    as the return value of compile().
-    The optional exec_id parameter can be passed to run_precompiled() to
-    locate a specific executable.
-    """
-    desired_result.limit = self.run_time_limit(desired_result)
-
-    input = self.input_manager.select_input(desired_result)
-    self.session.add(input)
-    self.session.flush()
-
-    log.debug('running desired result %s on input %s', desired_result.id,
-              input.id)
-
-    self.input_manager.before_run(desired_result, input)
-
-    if self.interface.parallel_compile:
-        result = self.interface.run_precompiled(desired_result, input,
-                                                desired_result.limit,
-                                                compile_result, exec_id)
-    else:
-        result = self.interface.compile_and_run(desired_result, input,
-                                                desired_result.limit)
-
-    self.report_result(desired_result, result, input)
-
-  def lap_timer(self):
-    """return the time elapsed since the last call to lap_timer"""
-    t = time.time()
-    r = t - self.laptime
-    self.laptime = t
-    return r
-
-  def claim_desired_result(self, desired_result):
-    """
-    claim a desired result by changing its state to running
-    return True if the result was claimed for this process
-    """
-    self.commit()
-    try:
-      self.session.refresh(desired_result)
-      if desired_result.state == 'REQUESTED':
-        desired_result.state = 'RUNNING'
-        desired_result.start_date = datetime.now()
-        self.commit()
-        return True
-    except SQLAlchemyError:
-      self.session.rollback()
-    return False
-
-  def query_pending_desired_results(self):
-    q = (self.session.query(DesiredResult)
-         .filter_by(tuning_run=self.tuning_run,
-                    state='REQUESTED')
-         .order_by(DesiredResult.generation,
-                   DesiredResult.priority.desc()))
-    return q
-
-  def process_all(self):
-    """
-    process all desired_results in the database
-    """
-    self.lap_timer()  # reset timer
-    q = self.query_pending_desired_results()
-
-    if self.interface.parallel_compile:
-      desired_results = []
-      thread_args = []
-
-      def compile_result(args):
-        interface, data, result_id = args
-        return interface.compile(data, result_id)
-
-      for dr in q.all():
-        if self.claim_desired_result(dr):
-          desired_results.append(dr)
-          thread_args.append((self.interface, dr.configuration.data, dr.id))
-      if len(desired_results) == 0:
-        return
-      thread_pool = ThreadPool(len(desired_results))
-      # print 'Compiling %d results' % len(thread_args)
-      try:
-        # Use map_async instead of map because of bug where keyboardinterrupts are ignored
-        # See http://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool
-        compile_results = thread_pool.map_async(compile_result,
-                                                thread_args).get(9999999)
-      except Exception:
-        # Need to kill other processes because only one thread receives
-        # exception
-        self.interface.kill_all()
-        raise
-      # print 'Running %d results' % len(thread_args)
-      for dr, compile_result in zip(desired_results, compile_results):
-        # Make sure compile was successful
-        self.run_desired_result(dr, compile_result, dr.id)
-        try:
-          self.interface.cleanup(dr.id)
-        except RuntimeError, e:
-          print e
-          # print 'Done!'
-      thread_pool.close()
-    else:
-      for dr in q.all():
-        if self.claim_desired_result(dr):
-          self.run_desired_result(dr)
-
-
-def _cputype():
-  try:
-    return re.search(r"model name\s*:\s*([^\n]*)",
-                     open("/proc/cpuinfo").read()).group(1)
-  except:
-    pass
-  try:
-    # for OS X
-    import subprocess
-
-    return subprocess.Popen(["sysctl", "-n", "machdep.cpu.brand_string"],
-                            stdout=subprocess.PIPE).communicate()[0].strip()
-  except:
-    log.warning("failed to get cpu type")
-  return "unknown"
-
-
-def _cpucount():
-  try:
-    return int(os.sysconf("SC_NPROCESSORS_ONLN"))
-  except:
-    pass
-  try:
-    return int(os.sysconf("_SC_NPROCESSORS_ONLN"))
-  except:
-    pass
-  try:
-    return int(os.environ["NUMBER_OF_PROCESSORS"])
-  except:
-    pass
-  try:
-    return int(os.environ["NUM_PROCESSORS"])
-  except:
-    log.warning("failed to get the number of processors")
-  return 1
-
-
-def _memorysize():
-  try:
-    return int(os.sysconf("SC_PHYS_PAGES") * os.sysconf("SC_PAGE_SIZE"))
-  except:
-    pass
-  try:
-    return int(os.sysconf("_SC_PHYS_PAGES") * os.sysconf("_SC_PAGE_SIZE"))
-  except:
-    pass
-  try:
-    # for OS X
-    import subprocess
-
-    return int(subprocess.Popen(["sysctl", "-n", "hw.memsize"],
-                                stdout=subprocess.PIPE)
-               .communicate()[0].strip())
-  except:
-    log.warning("failed to get total memory")
-  return 1024 ** 3
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/inputmanager.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/inputmanager.py
deleted file mode 100644
index 7acaeaa0cfa178c7e62716a29cca2e9497f255d1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/inputmanager.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import abc
-import opentuner
-from opentuner.resultsdb.models import *
-
-
-class InputManager(object):
-  """
-  abstract base class for input managers
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def set_driver(self, measurement_driver):
-    self.driver = measurement_driver
-    self.session = measurement_driver.session
-    self.program = measurement_driver.tuning_run.program
-
-  @abc.abstractmethod
-  def select_input(self, desired_result):
-    """
-    select the input to be used to test desired_result
-    """
-    return opentuner.resultsdb.models.Input()
-
-
-  def before_run(self, desired_result, input):
-    """hook called before an input is used"""
-    pass
-
-  def after_run(self, desired_result, input):
-    """hook called after an input is used"""
-    pass
-
-  def get_input_class(self):
-    return None
-
-
-class FixedInputManager(InputManager):
-  """
-  an input manager that produces a single input for all tests
-  """
-
-  def __init__(self,
-               input_class_name='fixed',
-               size=-1,
-               path=None,
-               extra=None):
-    self.input_class_name = input_class_name
-    self.size = size
-    self.path = path
-    self.extra = extra
-    self.the_input = None
-    super(FixedInputManager, self).__init__()
-
-
-  def get_input_class(self):
-    return InputClass.get(self.session,
-                          program=self.program,
-                          name=self.input_class_name,
-                          size=self.size)
-
-  def create_input(self, desired_result):
-    """create the fixed input database object, result will be cached"""
-    return Input(input_class=self.get_input_class(),
-                 path=self.path,
-                 extra=self.extra)
-
-  def select_input(self, desired_result):
-    if self.the_input is None:
-      self.the_input = self.create_input(desired_result)
-    return self.the_input
-
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/interface.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/interface.py
deleted file mode 100644
index 174902488289fe4ef038a9dd3553ea13acc68f2b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/measurement/interface.py
+++ /dev/null
@@ -1,366 +0,0 @@
-import abc
-import argparse
-import errno
-import hashlib
-import logging
-import os
-import re
-import signal
-import subprocess
-import threading
-import time
-from multiprocessing.pool import ThreadPool
-
-try:
-  import resource
-except ImportError:
-  resource = None
-
-try:
-  import fcntl
-except ImportError:
-  fcntl = None
-
-import opentuner
-from opentuner import resultsdb
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--parallel-compile', action='store_true',
-                       default=False,
-                       help="present if compiling can be done in parallel")
-
-the_io_thread_pool = None
-
-
-class MeasurementInterface(object):
-  """
-  abstract base class for compile and measurement
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def __init__(self,
-               args=None,
-               project_name=None,
-               program_name='unknown',
-               program_version='unknown',
-               manipulator=None,
-               objective=None,
-               input_manager=None):
-    self.args = args
-    self._project = project_name
-    self._program = program_name
-    self._version = program_version
-    self._objective = objective
-    self._manipulator = manipulator
-    self._input_manager = input_manager
-
-    self.pids = []
-    self.pid_lock = threading.Lock()
-    self.parallel_compile = args.parallel_compile
-    # If parallel_compile is False then compile_and_run() will be invoked
-    # sequentially otherwise the driver first invokes compile() in parallel
-    # followed by run_precompiled() sequentially
-
-  def compile(self, config_data, id):
-    """
-    Compile in PARALLEL according to the configuration in config_data
-    (obtained from desired_result.configuration). Should use the id
-    parameter to determine the output location of the executable. The
-    return value will be passed to run_precompiled() as compile_result,
-    which is useful for storing error/timeout information.
-    """
-    if self.parallel_compile:
-        raise RuntimeError('MeasurementInterface.compile() not implemented '
-                'for parallel compilation')
-    pass
-
-  def run_precompiled(self, desired_result, input, limit, compile_result, id):
-    """
-    Run the given desired_result SEQUENTIALLY on input and produce a
-    Result(). Abort early if limit (in seconds) is reached. Assumes the
-    executable to be measured has already been compiled by compile() to an
-    executable corresponding to identifier id. The compile_result is the
-    return value of compile(), and it will be None if compile() was not
-    called.
-    """
-    if self.parallel_compile:
-        raise RuntimeError('MeasurementInterface.run_precompiled() not '
-                'implemented for parallel compilation')
-    pass
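-
-  # Sketch of a parallel-compile subclass (hypothetical names; enabled by
-  # passing --parallel-compile on the command line):
-  #   class MyInterface(MeasurementInterface):
-  #     def compile(self, config_data, id):
-  #       return self.call_program('make bin_%d' % id, limit=300)
-  #     def run_precompiled(self, desired_result, input, limit,
-  #                         compile_result, id):
-  #       r = self.call_program('./bin_%d' % id, limit=limit)
-  #       return Result(time=r['time'])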
-
-  def cleanup(self, id):
-    """
-    Clean up any temporary files associated with the executable
-    """
-    pass
-
-  def pre_process(self):
-    """
-    Hook run before each iteration. This method will be called once per
-    iteration, before all threads are launched.
-    """
-    pass
-
-  def post_process(self):
-    """
-    Hook run after each iteration. This method will be called once per
-    iteration, after all threads are committed.
-    """
-    pass
-
-  def extra_convergence_criteria(self, result):
-    """
-    The extra convergence criteria which returns True if the
-    current result is acceptable by the user
-    """
-    return False
-
-  #@abc.abstractmethod
-  def compile_and_run(self, desired_result, input, limit):
-    """
-    Compile and run the given desired_result on input and produce a
-    Result(); abort early if limit (in seconds) is reached. This function
-    is only used in the sequential execution flow.
-
-    FIXME: Should uncomment @abc.abstractmethod; commented out for now for
-    compatibility
-    """
-    return self.run(desired_result, input, limit)
-
-  def run(self, desired_result, input, limit):
-    """
-    run the given desired_result on input and produce a Result(),
-    abort early if limit (in seconds) is reached
-    """
-    return opentuner.resultsdb.models.Result()
-
-  def save_final_config(self, config):
-    """
-    called at the end of autotuning with the best resultsdb.models.Configuration
-    """
-    try:
-      config_str = repr(config.data)
-      if len(config_str) > 256:
-        config_str = config_str[:256] + '...'
-      log.info('final configuration: %s', config_str)
-      log.info('you may want to implement save_final_config(), to store this')
-    except:
-      log.error('error printing configuration', exc_info=True)
-
-  def db_program_version(self, session):
-    """return a version identifier for the program being tuned"""
-    return resultsdb.models.ProgramVersion.get(
-        session=session,
-        project=self.project_name(),
-        name=self.program_name(),
-        version=self.program_version(),
-        parameter_info=self.manipulator().parameters_to_json(),
-    )
-
-  def set_driver(self, measurement_driver):
-    self.driver = measurement_driver
-
-  def project_name(self):
-    if self._project is not None:
-      return self._project
-    autoname = re.sub('(Measurement)?Interface$', '', self.__class__.__name__)
-    if autoname:
-      return autoname
-    else:
-      return 'unknown'
-
-  def program_name(self):
-    return self._program
-
-  def program_version(self):
-    return self._version
-
-  def file_hash(self, filename):
-    """helper used to generate program versions"""
-    return hashlib.sha256(open(filename).read()).hexdigest()
-
-  def manipulator(self):
-    """
-    called once to create the search.manipulator.ConfigurationManipulator
-    """
-    if self._manipulator is None:
-      msg = ('MeasurementInterface.manipulator() must be implemented or a '
-             '"manipulator=..." must be provided to the constructor')
-      log.error(msg)
-      raise Exception(msg)
-    return self._manipulator
-
-  def objective(self):
-    """
-    called once to create the search.objective.SearchObjective
-    """
-    if self._objective is None:
-      from ..search.objective import MinimizeSize
-
-      return MinimizeSize()
-    return self._objective
-
-  def input_manager(self):
-    """
-    called once to create the measurement.inputmanager.InputManager
-    """
-    if self._input_manager is None:
-      from .inputmanager import FixedInputManager
-
-      return FixedInputManager()
-    return self._input_manager
-
-  def seed_configurations(self):
-    """
-    Extra seed configuration objects to add to those given on the command line.
-    Configuration objects (typically dictionaries) not database objects.
-    """
-    return []
-
-  def kill_all(self):
-    self.pid_lock.acquire()
-    for pid in self.pids:
-      goodkillpg(pid)
-    self.pids = []
-    self.pid_lock.release()
-
-  def call_program(self, cmd, limit=None, memory_limit=None, **kwargs):
-    """
-    call cmd and kill it if it runs for longer than limit
-
-    returns dictionary like
-      {'returncode': 0,
-       'stdout': '', 'stderr': '',
-       'timeout': False, 'time': 1.89}
-    """
-    the_io_thread_pool_init(self.args.parallelism)
-    if limit == float('inf'):
-      limit = None
-    if type(cmd) in (str, unicode):
-      kwargs['shell'] = True
-    killed = False
-    t0 = time.time()
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                         preexec_fn=preexec_setpgid_setrlimit(memory_limit),
-                         **kwargs)
-    # Add p.pid to list of processes to kill in case of keyboardinterrupt
-    self.pid_lock.acquire()
-    self.pids.append(p.pid)
-    self.pid_lock.release()
-
-    try:
-      stdout_result = the_io_thread_pool.apply_async(p.stdout.read)
-      stderr_result = the_io_thread_pool.apply_async(p.stderr.read)
-      while p.returncode is None:
-        if limit is None:
-          goodwait(p)
-        elif limit and time.time() > t0 + limit:
-          killed = True
-          goodkillpg(p.pid)
-          goodwait(p)
-        else:
-          # still waiting...
-          sleep_for = limit - (time.time() - t0)
-          if not stdout_result.ready():
-            stdout_result.wait(sleep_for)
-          elif not stderr_result.ready():
-            stderr_result.wait(sleep_for)
-          else:
-            #TODO(jansel): replace this with a portable waitpid
-            time.sleep(0.001)
-        p.poll()
-    except:
-      if p.returncode is None:
-        goodkillpg(p.pid)
-      raise
-    finally:
-      # No longer need to kill p
-      self.pid_lock.acquire()
-      if p.pid in self.pids:
-        self.pids.remove(p.pid)
-      self.pid_lock.release()
-
-    # TODO-autotune: Extract the file size and use it
-    # FIXIT: Appropriately update the file size
-    t1 = time.time()
-    return {'time': float('inf') if killed else (t1 - t0),
-            'timeout': killed,
-            'returncode': p.returncode,
-            'stdout': stdout_result.get(),
-            'stderr': stderr_result.get(),
-            }
-
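-  # Usage sketch for call_program() inside run() (cmd is whatever command
-  # the tuner built; Result comes from opentuner.resultsdb.models):
-  #   r = self.call_program(cmd, limit=limit)
-  #   if r['timeout']:
-  #     return Result(state='TIMEOUT', time=float('inf'))
-  #   return Result(time=r['time'])
-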
-  def getFileSize(self, filename):
-    fileinfo = os.stat(filename)
-    file_size = fileinfo.st_size
-    return {'binary_size': file_size}
-
-  def prefix_hook(self, session):
-    pass
-
-  @classmethod
-  def main(cls, args, *pargs, **kwargs):
-    from opentuner.tuningrunmain import TuningRunMain
-
-    return TuningRunMain(cls(args, *pargs, **kwargs), args).main()
-
-
-class DefaultMeasurementInterface(MeasurementInterface):
-  def run(self, desired_result, input, limit):
-    raise RuntimeError('MeasurementInterface.run() not implemented')
-
-
-def preexec_setpgid_setrlimit(memory_limit):
-  if resource is not None:
-    def _preexec():
-      os.setpgid(0, 0)
-      try:
-        resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
-      except ValueError:
-        pass  # No permission
-      if memory_limit:
-        try:
-          (soft, hard) = resource.getrlimit(resource.RLIMIT_AS)
-          resource.setrlimit(resource.RLIMIT_AS, (min(soft, memory_limit),
-                                                  min(hard, memory_limit)))
-        except ValueError:
-          pass  # No permission
-    return _preexec
-
-
-def the_io_thread_pool_init(parallelism=1):
-  global the_io_thread_pool
-  if the_io_thread_pool is None:
-    the_io_thread_pool = ThreadPool(2 * parallelism)
-    # make sure the threads are started up
-    the_io_thread_pool.map(int, range(2 * parallelism))
-
-
-def goodkillpg(pid):
-  """
-  wrapper around kill to catch errors
-  """
-  log.debug("killing pid %d", pid)
-  try:
-    if hasattr(os, 'killpg'):
-      os.killpg(pid, signal.SIGKILL)
-    else:
-      os.kill(pid, signal.SIGKILL)
-  except:
-    log.error('error killing process %s', pid, exc_info=True)
-
-
-def goodwait(p):
-  """
-  Python doesn't retry system calls that fail with EINTR, so retry manually
-  """
-  while True:
-    try:
-      rv = p.wait()
-      return rv
-    except OSError, e:
-      if e.errno != errno.EINTR:
-        raise
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/__init__.py
deleted file mode 100644
index a0150a1577e22cdfd50e490bb4a0c6b735bfcac8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-
-from connect import connect
-
-import models
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/connect.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/connect.py
deleted file mode 100644
index 1a04d05447a3b62d241a4f2402c22cac15b98b3b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/connect.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from sqlalchemy import create_engine
-from sqlalchemy.orm import scoped_session, sessionmaker
-from models import Base, _Meta
-import logging
-import time
-from pprint import pprint
-
-log = logging.getLogger(__name__)
-
-DB_VERSION = "0.0"
-
-if False:  # profiling of queries
-  import atexit
-  from sqlalchemy import event
-  from collections import Counter
-  from sqlalchemy.engine import Engine
-  the_query_totals = Counter()
-
-  @event.listens_for(Engine, "before_cursor_execute")
-  def before_cursor_execute(conn, cursor, statement,
-                            parameters, context, executemany):
-      context._query_start_time = time.time()
-
-  @event.listens_for(Engine, "after_cursor_execute")
-  def after_cursor_execute(conn, cursor, statement,
-                           parameters, context, executemany):
-      total = time.time() - context._query_start_time
-      the_query_totals[statement] += total
-
-  @atexit.register
-  def report():
-    pprint(the_query_totals.most_common(10))
-
-
-def connect(dbstr):
-  engine = create_engine(dbstr, echo = False)
-  connection = engine.connect()
-
-  # handle the case where the db was initialized before a version table existed
-  if engine.dialect.has_table(connection, "program"):
-    # if there are existing tables
-    if not engine.dialect.has_table(connection, "_meta"):
-      # if no version table, assume outdated db version and error
-      connection.close()
-      raise Exception("Your opentuner database is currently out of date. Save a back up and reinitialize")
-
-  # else if we have the table already, make sure version matches
-  if engine.dialect.has_table(connection, "_meta"):
-    Session = scoped_session(sessionmaker(autocommit=False,
-                                          autoflush=False,
-                                          bind=engine))
-    version = _Meta.get_version(Session)
-    if not DB_VERSION == version:
-      raise Exception('Your opentuner database version {} is out of date with the current version {}'.format(version, DB_VERSION))
-
-  Base.metadata.create_all(engine)
-
-  Session = scoped_session(sessionmaker(autocommit=False,
-                                        autoflush=False,
-                                        bind=engine))
-  # mark database with current version
-  _Meta.add_version(Session, DB_VERSION)
-  Session.commit()
-
-  return engine, Session
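-
-# Usage sketch (a sqlite URL is one example of dbstr):
-#   engine, Session = connect('sqlite:///opentuner.db')
-#   session = Session()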
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/models.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/models.py
deleted file mode 100644
index dd88ae8e51c0d94db2364cbc444b9a11d2667116..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/resultsdb/models.py
+++ /dev/null
@@ -1,319 +0,0 @@
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy import create_engine
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy import (
-  Column, Integer, String, DateTime, Boolean, Enum,
-  Float, PickleType, ForeignKey, Text, func, Index)
-import sqlalchemy
-import re
-
-from cPickle import dumps, loads
-import zlib
-class CompressedPickler(object):
-  @classmethod
-  def dumps(cls, obj, protocol=2):
-    s = dumps(obj, protocol)
-    sz = zlib.compress(s, 9)
-    if len(sz) < len(s):
-      return sz
-    else:
-      return s
-
-  @classmethod
-  def loads(cls, string):
-    try:
-      s = zlib.decompress(string)
-    except:
-      s = string
-    return loads(s)
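-
-# Round-trip sketch: CompressedPickler.loads(CompressedPickler.dumps(obj))
-# returns an equal object; dumps() keeps the zlib-compressed form only when
-# it is smaller than the raw pickle, and loads() handles both forms.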
-
-class Base(object):
-  @declared_attr
-  def __tablename__(cls):
-    """convert camel case to underscores"""
-    return re.sub(r'([a-z])([A-Z])', r'\1_\2', cls.__name__).lower()
-
-  id = Column(Integer, primary_key=True, index=True)
-
-
-Base = declarative_base(cls=Base)
-
-class _Meta(Base):
-  """ meta table to track current version """
-  db_version = Column(String(128))
-
-  @classmethod
-  def has_version(cls, session, version):
-    try:
-      session.flush()
-      session.query(_Meta).filter_by(db_version=version).one()
-      return True
-    except sqlalchemy.orm.exc.NoResultFound:
-      return False
-
-  @classmethod
-  def get_version(cls, session):
-    try:
-      session.flush()
-      x = session.query(_Meta).one()
-      return x.db_version
-    except sqlalchemy.orm.exc.NoResultFound:
-      return None
-
-  @classmethod
-  def add_version(cls, session, version):
-    if not cls.has_version(session, version):
-      session.add(_Meta(db_version=version))
-
-
-class Program(Base):
-  project = Column(String(128))
-  name = Column(String(128))
-
-  @classmethod
-  def get(cls, session, project, name):
-    try:
-      session.flush()
-      return session.query(Program).filter_by(project=project, name=name).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-      t = Program(project=project, name=name)
-      session.add(t)
-      return t
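-
-  # Get-or-create sketch: Program.get(session, 'myproject', 'myprog')
-  # returns the existing row when present, otherwise adds a new row to the
-  # session and returns it (committed later by the caller).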
-
-
-class ProgramVersion(Base):
-  program_id = Column(ForeignKey(Program.id))
-  program = relationship(Program, backref='versions')
-  version = Column(String(128))
-  parameter_info = Column(Text)
-
-  @property
-  def name(self):
-    return self.program.name
-
-  @property
-  def project(self):
-    return self.program.project
-
-  @classmethod
-  def get(cls, session, project, name, version, parameter_info=None):
-    program = Program.get(session, project, name)
-    try:
-      session.flush()
-      if parameter_info is None:
-        return session.query(ProgramVersion).filter_by(program=program,
-                                                     version=version).one()
-      else:
-        return session.query(ProgramVersion).filter_by(program=program,
-                                                      version=version,
-                                                      parameter_info=parameter_info).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-      t = ProgramVersion(program=program, version=version, parameter_info=parameter_info)
-      session.add(t)
-      return t
-
-
-class Configuration(Base):
-  program_id = Column(ForeignKey(Program.id))
-  program = relationship(Program)
-  hash = Column(String(64))
-  data = Column(PickleType(pickler=CompressedPickler))
-
-  @classmethod
-  def get(cls, session, program, hashv, datav):
-    try:
-      session.flush()
-      return (session.query(Configuration)
-              .filter_by(program=program, hash=hashv).one())
-    except sqlalchemy.orm.exc.NoResultFound:
-      t = Configuration(program=program, hash=hashv, data=datav)
-      session.add(t)
-      return t
-
-
-Index('ix_configuration_custom1', Configuration.program_id, Configuration.hash)
-
-
-class MachineClass(Base):
-  name = Column(String(128))
-
-  @classmethod
-  def get(cls, session, name):
-    try:
-      session.flush()
-      return session.query(MachineClass).filter_by(name=name).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-      t = MachineClass(name=name)
-      session.add(t)
-      return t
-
-
-class Machine(Base):
-  name = Column(String(128))
-
-  cpu = Column(String(128))
-  cores = Column(Integer)
-  memory_gb = Column(Float)
-
-  machine_class_id = Column(ForeignKey(MachineClass.id))
-  machine_class = relationship(MachineClass, backref='machines')
-
-
-class InputClass(Base):
-  program_id = Column(ForeignKey(Program.id))
-  program = relationship(Program, backref='inputs')
-
-  name = Column(String(128))
-  size = Column(Integer)
-
-  @classmethod
-  def get(cls, session, program, name='default', size=-1):
-    try:
-      session.flush()
-      return session.query(InputClass).filter_by(program=program,
-                                                 name=name,
-                                                 size=size).one()
-    except sqlalchemy.orm.exc.NoResultFound:
-      t = InputClass(program=program, name=name, size=size)
-      session.add(t)
-      return t
-
-
-class Input(Base):
-  #state          = Column(Enum('ANY_MACHINE', 'SINGLE_MACHINE', 'DELETED'),
-  #                        default='ANY_MACHINE', name='t_input_state')
-
-  input_class_id = Column(ForeignKey(InputClass.id))
-  input_class = relationship(InputClass, backref='inputs')
-
-  #optional, set only for state='SINGLE_MACHINE'
-  #machine_id     = Column(ForeignKey(MachineClass.id))
-  #machine        = relationship(MachineClass, backref='inputs')
-
-  #optional, for use by InputManager
-  path = Column(Text)
-  extra = Column(PickleType(pickler=CompressedPickler))
-
-
-class TuningRun(Base):
-  uuid = Column(String(32), index=True, unique=True)
-
-  program_version_id = Column(ForeignKey(ProgramVersion.id))
-  program_version = relationship(ProgramVersion, backref='tuning_runs')
-
-  machine_class_id = Column(ForeignKey(MachineClass.id))
-  machine_class = relationship(MachineClass, backref='tuning_runs')
-
-  input_class_id = Column(ForeignKey(InputClass.id))
-  input_class = relationship(InputClass, backref='tuning_runs')
-
-  name = Column(String(128), default='unnamed')
-  args = Column(PickleType(pickler=CompressedPickler))
-  objective = Column(PickleType(pickler=CompressedPickler))
-
-  state = Column(Enum('QUEUED', 'RUNNING', 'COMPLETE', 'ABORTED',
-                      name='t_tr_state'),
-                 default='QUEUED')
-  start_date = Column(DateTime, default=func.now())
-  end_date = Column(DateTime)
-
-  final_config_id = Column(ForeignKey(Configuration.id))
-  final_config = relationship(Configuration)
-
-  #__mapper_args__ = {'primary_key': uuid}
-
-  @property
-  def program(self):
-    return self.program_version.program
-
-
-class Result(Base):
-  #set by MeasurementDriver:
-  configuration_id = Column(ForeignKey(Configuration.id))
-  configuration = relationship(Configuration)
-
-  machine_id = Column(ForeignKey(Machine.id))
-  machine = relationship(Machine, backref='results')
-
-  input_id = Column(ForeignKey(Input.id))
-  input = relationship(Input, backref='results')
-
-  tuning_run_id = Column(ForeignKey(TuningRun.id), index=True)
-  tuning_run = relationship(TuningRun, backref='results')
-
-  collection_date = Column(DateTime, default=func.now())
-  collection_cost = Column(Float)
-
-  #set by MeasurementInterface:
-  state = Column(Enum('OK', 'TIMEOUT', 'ERROR',
-                      name='t_result_state'),
-                 default='OK')
-  time = Column(Float)
-  accuracy = Column(Float)
-  energy = Column(Float)
-  size = Column(Float)
-  confidence = Column(Float)
-  #extra = Column(PickleType)
-
-  #set by SearchDriver
-  was_new_best = Column(Boolean)
-
-
-Index('ix_result_custom1', Result.tuning_run_id, Result.was_new_best)
-
-
-class DesiredResult(Base):
-  #set by the technique:
-  configuration_id = Column(ForeignKey(Configuration.id))
-  configuration = relationship(Configuration)
-  limit = Column(Float)
-
-  #set by the search driver
-  priority = Column(Float)
-  tuning_run_id = Column(ForeignKey(TuningRun.id))
-  tuning_run = relationship(TuningRun, backref='desired_results')
-  generation = Column(Integer)
-  requestor = Column(String(128))
-  request_date = Column(DateTime, default=func.now())
-
-  #set by the measurement driver
-  state = Column(Enum('UNKNOWN', 'REQUESTED', 'RUNNING',
-                      'COMPLETE', 'ABORTED',
-                      name="t_dr_state"),
-                 default='UNKNOWN')
-  result_id = Column(ForeignKey(Result.id), index=True)
-  result = relationship(Result, backref='desired_results')
-  start_date = Column(DateTime)
-
-  #input_id        = Column(ForeignKey(Input.id))
-  #input           = relationship(Input, backref='desired_results')
-
-
-Index('ix_desired_result_custom1', DesiredResult.tuning_run_id,
-      DesiredResult.generation)
-
-Index('ix_desired_result_custom2', DesiredResult.tuning_run_id,
-      DesiredResult.configuration_id)
-
-
-# track bandit meta-technique information if a bandit meta-technique is used for a tuning run.
-class BanditInfo(Base):
-  tuning_run_id = Column(ForeignKey(TuningRun.id))
-  tuning_run = relationship(TuningRun, backref='bandit_info')
-  # the bandit exploration/exploitation tradeoff
-  c = Column(Float)
-  # the bandit window
-  window = Column(Integer)
-
-class BanditSubTechnique(Base):
-  bandit_info_id = Column(ForeignKey(BanditInfo.id))
-  bandit_info = relationship(BanditInfo, backref='subtechniques')
-  name = Column(String(128))
-
-
-if __name__ == '__main__':
-  #test:
-  engine = create_engine('sqlite:///:memory:', echo=True)
-  Base.metadata.create_all(engine)
-
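[Note] Every model above implements the same get-or-create idiom: flush the session so pending rows become queryable, try to fetch, and insert on NoResultFound. A minimal self-contained sketch of that idiom, using SQLAlchemy 1.x-style declarative models to match the code above (the Tag model is illustrative, not part of this schema):

import sqlalchemy.orm
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class Tag(Base):
  __tablename__ = 'tag'
  id = Column(Integer, primary_key=True)
  name = Column(String(128))

  @classmethod
  def get(cls, session, name):
    try:
      session.flush()  # push pending inserts so the query can see them
      return session.query(Tag).filter_by(name=name).one()
    except NoResultFound:
      t = Tag(name=name)
      session.add(t)  # left uncommitted; the caller owns the transaction
      return t

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
session = sqlalchemy.orm.sessionmaker(bind=engine)()
a = Tag.get(session, 'x')
b = Tag.get(session, 'x')
assert a is b  # second call found the pending row instead of duplicating it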
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/__init__.py
deleted file mode 100644
index bb4ce57bb2d1760bd9fb6ebe196f39072a43ab4a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-
-import driver
-import objective
-import plugin
-import technique
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/bandittechniques.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/bandittechniques.py
deleted file mode 100644
index 29816c03de1c52b4b6318991faafb488952e4019..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/bandittechniques.py
+++ /dev/null
@@ -1,316 +0,0 @@
-import abc
-import copy
-import logging
-import math
-import random
-from collections import deque
-
-from .metatechniques import MetaSearchTechnique
-from .technique import register, SearchTechnique, all_techniques, get_random_generator_technique
-
-log = logging.getLogger(__name__)
-
-
-class BanditQueue(object):
-  def __init__(self, keys, C=0.05, window=500, **kwargs):
-    """
-    C is the exploration/exploitation tradeoff
-    window is how many past results to remember
-    """
-    super(BanditQueue, self).__init__(**kwargs)
-    self.C = C
-    self.history = deque()
-    self.keys = keys
-    self.use_counts = dict(((k, 0) for k in keys))
-    self.window = window
-    self.request_count = 0
-
-  @abc.abstractmethod
-  def exploitation_term(self, key):
-    """
-    value from 0 to 1.0 representing the quality of the technique
-    """
-    return 0.0
-
-  def exploration_term(self, key):
-    """
-    value representing how unsure we are (from the optimal bandit solution)
-    """
-    if self.use_counts[key] > 0:
-      return math.sqrt((2.0 * math.log(len(self.history), 2.0))
-                       / self.use_counts[key])
-    else:
-      return float('inf')
-
-  def bandit_score(self, key):
-    return (self.exploitation_term(key) +
-            self.C * self.exploration_term(key))
-
-  def ordered_keys(self):
-    """return the keys ordered from best to worst bandit score"""
-
-    keys = list(self.keys)
-    random.shuffle(keys)  # break ties randomly
-    keys.sort(key=self.bandit_score)
-
-    self.request_count += 1
-    if log.isEnabledFor(logging.DEBUG) and (self.request_count % 1000) == 0:
-      log.debug(str([
-          (t, self.exploitation_term(t), self.C * self.exploration_term(t))
-          for t in keys]))
-
-    return reversed(keys)
-
-  def on_result(self, key, value):
-    self.history.append((key, value))
-    self.on_push_history(key, value)
-    if len(self.history) > self.window:
-      self.on_pop_history(*self.history.popleft())
-
-  def on_push_history(self, key, value):
-    self.use_counts[key] += 1
-
-  def on_pop_history(self, key, value):
-    self.use_counts[key] -= 1
-
-
-class AUCBanditQueue(BanditQueue):
-  """
-  Area Under the Receiver Operating Characteristic Curve (AUC) credit assignment
-
-  See:
-  Comparison-based adaptive strategy selection with bandits in differential
-  evolution. Fialho et al.
-  """
-
-  def __init__(self, *args, **kwargs):
-    super(AUCBanditQueue, self).__init__(*args, **kwargs)
-    self.debug = kwargs.get('debug', False)
-    self.auc_sum = dict(((t, 0) for t in self.keys))
-    self.auc_decay = dict(((t, 0) for t in self.keys))
-
-  def exploitation_term_slow(self, key):
-    """
-    value from 0 to 1.0 representing the quality of the key
-
-    computes the area under the curve where finding a new
-    global best results in adding 1 to a cumulative total
-    """
-    score = 0.0
-    pos = 0
-    for t, value in self.history:
-      if t is key:
-        pos += 1
-        if value:
-          score += pos
-    if pos:
-      return score * 2.0 / (pos * (pos + 1.0))
-    else:
-      return 0.0
-
-  def exploitation_term_fast(self, key):
-    """
-    value from 0 to 1.0 representing the quality of the key
-
-    optimized O(1) implementation of exploitation_term_slow()
-    """
-    score = self.auc_sum[key]
-    pos = self.use_counts[key]
-    if pos:
-      return score * 2.0 / (pos * (pos + 1.0))
-    else:
-      return 0.0
-
-  def exploitation_term(self, key):
-    v1 = self.exploitation_term_fast(key)
-    if self.debug:
-      v2 = self.exploitation_term_slow(key)
-      assert v1 == v2
-    return v1
-
-  def on_push_history(self, key, value):
-    super(AUCBanditQueue, self).on_push_history(key, value)
-    if value:
-      self.auc_sum[key] += self.use_counts[key]
-      self.auc_decay[key] += 1
-
-  def on_pop_history(self, key, value):
-    super(AUCBanditQueue, self).on_pop_history(key, value)
-    self.auc_sum[key] -= self.auc_decay[key]
-    if value:
-      self.auc_decay[key] -= 1
-
-
-
-class AUCBanditMetaTechnique(MetaSearchTechnique):
-  def __init__(self, techniques, bandit_kwargs=dict(), **kwargs):
-    super(AUCBanditMetaTechnique, self).__init__(techniques, **kwargs)
-    self.bandit = AUCBanditQueue([t.name for t in techniques], **bandit_kwargs)
-    self.name_to_technique = dict(((t.name, t) for t in self.techniques))
-
-  def select_technique_order(self):
-    """select the next technique to use"""
-    return (self.name_to_technique[k] for k in self.bandit.ordered_keys())
-
-  def on_technique_result(self, technique, result):
-    self.bandit.on_result(technique.name, result.was_new_best)
-
-  def on_technique_no_desired_result(self, technique):
-    """treat not providing a configuration as not a best"""
-    self.bandit.on_result(technique.name, 0)
-
-  @classmethod
-  def generate_technique(cls, manipulator=None, num_techniques=5, retry_count=3, generator_weight=10, *args, **kwargs):
-    """
-    Generate a bandit by randomly selecting existing techniques or composable techniques.
-    If a composable technique is selected, its operators are then chosen at random as well.
-
-    :param manipulator: a ConfigurationManipulator used to enumerate parameters
-    :param num_techniques: max number of subtechniques in the bandit
-    :param retry_count: number of times to try getting a new technique before giving up
-    :param generator_weight: weight to increase probability of choosing to generate a technique
-    """
-    techniques, generators = all_techniques()
-
-    # get set of parameters to consider
-    paramset = set()
-    for p in manipulator.params:
-      paramset.add(type(p))
-
-    # filter techniques to get rid of metatechniques
-    basetechniques = [t for t in techniques if not isinstance(t, MetaSearchTechnique)]
-    bandit_techniques = []
-    for i in range(num_techniques):
-      for j in range(retry_count):
-        # pick a technique or generate a composable
-        if random.random() < float(len(basetechniques)) / (len(basetechniques) + generator_weight*len(generators)):
-          candidate = copy.deepcopy(random.choice(basetechniques))
-        else:
-          # pick a random generator
-          candidate = get_random_generator_technique(generators, manipulator=manipulator)
-        if candidate.name not in [t.name for t in bandit_techniques]:
-          bandit_techniques.append(candidate)
-          break
-
-    # make a bandit of the output list
-    return cls(bandit_techniques, name="GeneratedBandit", *args, **kwargs)
-
-
-class AUCBanditMutationTechnique(SearchTechnique):
-  def __init__(self, bandit_kwargs=dict(), **kwargs):
-    super(AUCBanditMutationTechnique, self).__init__(**kwargs)
-    self.bandit = None
-    self.bandit_kwargs = bandit_kwargs
-    self.pending_results = []
-
-  def handle_requested_result(self, result):
-    for i in xrange(len(self.pending_results)):
-      cfg, name, index = self.pending_results[i]
-      if result.configuration == cfg:
-        self.bandit.on_result((name, index), result.was_new_best)
-        del self.pending_results[i]
-        return
-    log.warning("unexpected result")
-
-  def desired_configuration(self):
-    """
-    use the bandit to pick a single manipulator function and apply it
-    """
-    seed = self.get_seed()
-    if self.bandit is None:
-      self.init_bandit(seed)
-
-    cfg = self.manipulator.copy(seed)
-    hash1 = self.manipulator.hash_config(cfg)
-    params = self.manipulator.parameters_dict(cfg)
-    for name, index in self.bandit.ordered_keys():
-      if name in params:
-        param = params[name]
-        fns = param.manipulators(cfg)
-        fn = fns[index % len(fns)]
-        fn(cfg)
-        hash2 = self.manipulator.hash_config(cfg)
-        if hash1 != hash2:
-          cfg = self.driver.get_configuration(cfg)
-          self.pending_results.append((cfg, name, index))
-          log.debug("applied %s[%s] manipulator function", name, index)
-          return cfg
-
-    return None
-
-
-  def init_bandit(self, cfg):
-    options = []
-    for param in self.manipulator.parameters(cfg):
-      for i in xrange(len(param.manipulators(cfg))):
-        options.append((param.name, i))
-    # TODO(jansel): remove assumption that set of parameters are fixed
-    self.bandit = AUCBanditQueue(options, **self.bandit_kwargs)
-
-  def get_seed(self):
-    """seed mutation with global best"""
-    if (self.driver.best_result is not None and
-        self.driver.best_result.state == 'OK'):
-      return self.driver.best_result.configuration.data
-    else:
-      return self.manipulator.random()
-
-
-import evolutionarytechniques
-import differentialevolution
-import simplextechniques
-import patternsearch
-import simulatedannealing
-from pso import PSO, HybridParticle
-import globalGA
-register(AUCBanditMutationTechnique())
-
-register(AUCBanditMetaTechnique([
-        differentialevolution.DifferentialEvolutionAlt(),
-        evolutionarytechniques.UniformGreedyMutation(),
-        evolutionarytechniques.NormalGreedyMutation(mutation_rate=0.3),
-        simplextechniques.RandomNelderMead(),
-      ], name="AUCBanditMetaTechniqueA"))
-register(AUCBanditMetaTechnique([
-        differentialevolution.DifferentialEvolutionAlt(),
-        evolutionarytechniques.UniformGreedyMutation(),
-      ], name="AUCBanditMetaTechniqueB"))
-register(AUCBanditMetaTechnique([
-        differentialevolution.DifferentialEvolutionAlt(),
-        patternsearch.PatternSearch(),
-      ], name="AUCBanditMetaTechniqueC"))
-register(AUCBanditMetaTechnique([
-        PSO(crossover='op3_cross_OX3'),
-        PSO(crossover='op3_cross_OX1'),
-        PSO(crossover='op3_cross_CX'),
-        PSO(crossover='op3_cross_PMX'),
-        PSO(crossover='op3_cross_PX'),
-        evolutionarytechniques.GA(crossover='op3_cross_OX3', mutation_rate=0.01, crossover_rate=0.8),
-        evolutionarytechniques.GA(crossover='op3_cross_OX1', mutation_rate=0.01, crossover_rate=0.8),
-        evolutionarytechniques.GA(crossover='op3_cross_CX', mutation_rate=0.01, crossover_rate=0.8),
-        evolutionarytechniques.GA(crossover='op3_cross_PX', mutation_rate=0.01, crossover_rate=0.8),
-        evolutionarytechniques.GA(crossover='op3_cross_PMX', mutation_rate=0.01, crossover_rate=0.8),
-        evolutionarytechniques.UniformGreedyMutation(name='ga-base', mutation_rate=0.01)
-      ], name="PSO_GA_Bandit"))
-register(AUCBanditMetaTechnique([
-        differentialevolution.DifferentialEvolutionAlt(),
-        simulatedannealing.PseudoAnnealingSearch()
-      ], name="test"))
-register(AUCBanditMetaTechnique([
-        differentialevolution.DifferentialEvolutionAlt(),
-        evolutionarytechniques.UniformGreedyMutation(),
-        evolutionarytechniques.NormalGreedyMutation(mutation_rate=0.3),
-        simplextechniques.RandomNelderMead(),
-        simulatedannealing.PseudoAnnealingSearch()
-      ], name="test2"))
-register(AUCBanditMetaTechnique([
-        PSO(crossover='op3_cross_OX1'),
-        PSO(crossover='op3_cross_PMX'),
-        PSO(crossover='op3_cross_PX'),
-        evolutionarytechniques.GA(crossover='op3_cross_OX1', crossover_rate=0.5),
-        evolutionarytechniques.GA(crossover='op3_cross_PMX', crossover_rate=0.5),
-        evolutionarytechniques.GA(crossover='op3_cross_PX', crossover_rate=0.5),
-        differentialevolution.DifferentialEvolutionAlt(),
-        globalGA.NormalGreedyMutation(crossover_rate=0.5, crossover_strength=0.2, name='GGA')
-      ], name='PSO_GA_DE'))
-
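[Note] For reference, the selection rule the classes above implement is an Upper Confidence Bound (UCB) score over techniques, with the exploitation term supplied by the AUC credit assignment:

\[
\mathrm{score}(k) \;=\; \hat{q}(k) \;+\; C \sqrt{\frac{2 \log_2 |H|}{n_k}},
\qquad
\hat{q}(k) \;=\; \frac{2}{n_k (n_k + 1)} \sum_{i=1}^{n_k} i\, b_i,
\]

where |H| is the number of results in the sliding window, n_k is how often key k was used within it, and b_i is 1 if the i-th use of k produced a new global best (0 otherwise). The factor 2/(n_k(n_k+1)) normalizes by 1 + 2 + ... + n_k, so the exploitation term stays in [0, 1]; note the code uses a base-2 logarithm where the textbook UCB1 formula uses the natural log. A minimal usage sketch of the queue in isolation (the keys are illustrative):

q = AUCBanditQueue(['de', 'ga', 'nm'], C=0.05, window=500)
best = list(q.ordered_keys())[0]  # key with the highest bandit score
q.on_result(best, True)           # True = this use found a new global best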
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/composableevolutionarytechniques.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/composableevolutionarytechniques.py
deleted file mode 100644
index e511744f30b8a7d271539e4ed26e247b5574c2b5..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/composableevolutionarytechniques.py
+++ /dev/null
@@ -1,512 +0,0 @@
-import random
-import time
-import sys
-import json
-from fn import _
-from technique import all_techniques
-from technique import register
-from technique import register_generator
-from technique import SequentialSearchTechnique
-from manipulator import *
-from opentuner.search.manipulator import Parameter
-
-
-class PopulationMember(object):
-  """
-  An extendable object representing a population member for ComposableEvolutionaryTechniques.
-  Must have the field "config" which is a configuration
-  """
-  def __init__(self, config):
-    self.config = config
-    self.timestamp = time.time()
-
-  def touch(self):
-    """
-    Update the timestamp on a PopulationMember
-    """
-    self.timestamp = time.time()
-
-
-class ComposableEvolutionaryTechnique(SequentialSearchTechnique):
-  """
-  An abstract base class for a technique that is composable with operators
-  """
-  __metaclass__ = abc.ABCMeta
-
-  # operator_map - from param-type to dict with operator name + list of arguments TODO
-  # min_parent - minimum number of parents returned. Limits which operators can be used
-  def __init__(self,
-               operator_map = {},
-               population_size = 50,
-               initial_configs = None,
-               *pargs,
-               **kwargs):
-    """
-    :param operator_map: map from parameter type to the operator to apply to it
-    :param population_size: number of configurations kept in the population
-    :param initial_configs: optional list of configurations to seed the population with
-    :param pargs: forwarded to SequentialSearchTechnique
-    :param kwargs: forwarded to SequentialSearchTechnique
-    """
-    super(ComposableEvolutionaryTechnique, self).__init__(*pargs, **kwargs)
-    # generate a name based on operators if no name
-
-    self.initial_configurations = initial_configs
-    self.population_size = population_size
-    self.operator_map = operator_map # map from parameter type to an operator function
-
-  def set_operator_map(self, operator_map):
-    self.operator_map = operator_map
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['population_size']
-
-  def default_generated_name(self):
-    """
-    Gets the default name for this technique based on its operator map
-
-    Name is in the format
-    classname paramname;opname;[arg1,arg2,[[kwarg1,v1][kwarg2,v2]]] paramname2;opname2;...
-    """
-    # TODO - include technique hyperparameters
-    parts = [self.base_name()]
-    for param in sorted(self.operator_map, cmp=lambda x,y: cmp(x.__name__, y.__name__)):
-      subparts = [param.__name__]
-      operator_info = self.operator_map[param]
-      subparts.append(operator_info['op_name'])
-      args = list(operator_info['args'])
-      kwargs = operator_info['kwargs']
-      args.append([(k,kwargs[k]) for k in sorted(kwargs)])
-      subparts.append(json.dumps(args, separators=(',', ':')))
-      parts.append(';'.join(subparts))
-    return ' '.join(parts)
-
-
-  def make_population_member(self, config):
-    """
-    Given a configuration, returns an object representing a single member of the
-    population with the given configuration. Meta-data about the configuration,
-    such as last selection time as a parent, can be attached to the object.
-
-    This can be overridden to return a custom population member for use in
-    :py:meth:`get_parents` and :py:meth:`update_population`
-
-    :param config: the configuration that this population member will represent
-    :return: a population member representing the input configuration.
-    """
-    return PopulationMember(config)
-
-  def select_parameters(self, params):
-    """
-    Given all the available parameters, return a subset of parameters to operate
-    on when generating a new configuration.
-
-    Can override this to operate on only a subset of parameters.
-
-    :param params: a list of all the available parameters
-    :return: a subset of params
-    """
-    return params
-
-  @abc.abstractmethod
-  def minimum_number_of_parents(self):
-    """
-    Return the minimum number of parents ever returned by :py:meth:`get_parents`.
-    This limits which operators can be composed with the technique. Operators
-    requiring more input configurations than the minimum number of parents will
-    result in an error.
-
-    :return: the minimum number of parents ever generated.
-    """
-    return 1
-
-  @abc.abstractmethod
-  def get_parents(self, population):
-    """
-    Given the current population, return a list of configurations that will be
-    used to generate a new configuration via operators. Returning fewer parents
-    than guaranteed by :py:meth:`minimum_number_of_parents` results in an error.
-
-    The parents will be passed to operators in order. If there are more parents
-    than required by an operator, extra parents will be discarded.
-
-    Note that operators mutate the first configuration passed in.
-
-    :param population: the current population in the technique
-    :return: a list of parent configurations to generate a new configuration from
-    """
-    pass
-
-  @abc.abstractmethod
-  def update_population(self, config, population):
-    """
-    Update the population given the newest configuration and the current
-    population in the technique. Should return the new population.
-
-    :param config: the newest generated configuration
-    :param population: the current population in this iteration of the technique
-    :return: the updated population
-    """
-    pass
-
-  def get_initial_population(self):
-    """
-    Returns an initial population by passing initial configurations into
-    :py:meth:`make_population_member`
-
-    :return: an initial list of objects returned by :py:meth:`make_population_member`.
-    """
-    init_configs = self.initial_configurations
-    if not init_configs:
-      init_configs = [self.manipulator.random() for i in range(self.population_size)]
-    return [self.make_population_member(config) for config in init_configs]
-
-  def lt(self, cfg_a, cfg_b):
-    """
-    Return whether cfg_a has a better objective function score than cfg_b
-
-    :param cfg_a: first configuration
-    :param cfg_b: second configuration
-    :return: True if cfg_a is better than cfg_b
-    """
-    def config(cfg):
-      return self.driver.get_configuration(cfg)
-    return self.objective.lt(config(cfg_a), config(cfg_b))
-
-  def lte(self, cfg_a, cfg_b):
-    """
-    Return whether cfg_a's objective function score is at least as good as cfg_b's
-    score
-
-    :param cfg_a: first configuration
-    :param cfg_b: second configuration
-    :return: True if cfg_a is at least as good as cfg_b
-    """
-    def config(cfg):
-      return self.driver.get_configuration(cfg)
-    return self.objective.lte(config(cfg_a), config(cfg_b))
-
-  def get_global_best_configuration(self):
-    """
-    Return the current global best configuration in the search
-
-    :return: the current global best configuration
-    """
-    if (self.driver.best_result is not None and
-        self.driver.best_result.state == 'OK'):
-      return self.manipulator.copy(self.driver.best_result.configuration.data)
-    else:
-      return self.manipulator.random()
-
-  def get_default_operator(self, param_type):
-    """
-    Given a parameter type, return a dictionary with information about the
-    operator to be used for the parameter. The returned dictionary must contain
-    the following 3 key, value pairs
-
-      1. 'op_name' - the string name of the operator
-      2. 'args' - an iterable of the non-configuration arguments in order
-      3. 'kwargs' - a dictionary from any optional arguments to their values
-
-    :return: a dictionary containing information about the operator to apply for the input parameter type
-    """
-    return {'op_name': 'op1_nop', 'args': [], 'kwargs': {}}
-
-  # HELPER METHODS FOR BUILDING OPERATOR MAP
-  @classmethod
-  def add_to_map(cls, operator_map, param_type, operator_name, *args, **kwargs):
-    """
-    A helper method for adding parameter to operator mappings into the operator
-    map.
-
-    :param operator_map: the operator map to add to
-    :param param_type: the parameter type to use this operator on
-    :param operator_name: the string name of the operator method
-    :param *args: any non-configuration arguments to the operator
-    :param **kwargs: any keyword arguments for the operator
-    """
-    if isinstance(param_type, Parameter):
-      ptype = type(param_type)
-    elif isinstance(param_type, str):
-      ptype = reduce(getattr, param_type.split("."), sys.modules[__name__])
-    else:
-      ptype = param_type
-
-    operator_map[ptype] = {'op_name': operator_name, 'args': args, 'kwargs': kwargs}
-
-
-  def main_generator(self):
-    """
-    The primary body of the search technique.
-    Initializes an initial population and then yields configurations by applying
-    operators to get_parents.
-    """
-    min_parents = self.minimum_number_of_parents()
-    # convert a manipulator configuration to a db.models.Configuration
-    def get_driver_configuration(cfg):
-      return self.driver.get_configuration(cfg)
-
-    # initialize the population
-    population = self.get_initial_population()
-
-    # measure initial population
-    for p in population:
-      yield get_driver_configuration(p.config)
-
-    while True:
-      # get parents
-      parents = self.get_parents(population)
-      if len(parents) < min_parents:
-        log.error("%s: Number of parents returned %d is less than the guaranteed"
-                  + " minimum returned by minimum_number_of_parents() %d.",
-                  self.name, len(parents), min_parents)
-        # fail and let other techniques work forever
-        while True:
-          yield None
-
-      params = self.select_parameters(self.manipulator.params)
-      config = self.get_new_config(parents, params)
-      yield get_driver_configuration(config)
-
-      population = self.update_population(config, population)
-
-      # safety check that population has all been tested
-      for p in population:
-        if not self.driver.has_results(get_driver_configuration(p.config)):
-          yield get_driver_configuration(p.config)
-
-  def get_new_config(self, parents, params):
-    """
-    Return a new configuration to test, given a list of parent configurations
-    This mutates the first parent
-
-    :param parents: A list of parent configurations
-    :param params: a list of parameters to operate on
-    :return: The mutated configuration (first parent)
-    """
-    for param in params:
-      self.apply_operator(param, parents) #TODO
-    return parents[0]
-
-  def apply_operator(self, param, parents):
-    """
-    Apply the appropriate operator for param to parents.
-    If an operator takes fewer input configurations than the number of parents,
-    only the first parents are passed in. If an operator takes more input configs
-    than minimum_number_of_parents, it logs an error and does nothing.
-    """
-    x = self.get_operator(type(param))
-
-    operator_name = x['op_name']
-    if not self.is_valid_operator(type(param), operator_name):
-      # do nothing
-      return
-
-    # operator is already in valid form and starts with op1, op2, op3, op4, or opn
-    num_parents_required = operator_name[2]
-    if num_parents_required == 'n':
-      # opn operators take the first config plus the list of remaining configs
-      args = [parents[0], parents[1:]]
-    else:
-      num_parents_required = int(num_parents_required)
-      args = parents[:num_parents_required]
-    args.extend(x['args'])
-
-    kwargs = x['kwargs']
-
-    getattr(param, operator_name)(*args, **kwargs)
-
-  def get_operator(self, param_type):
-    if param_type in self.operator_map:
-      return self.operator_map[param_type]
-    return self.get_default_operator(param_type)
-
-  def is_valid_operator(self, param_type, operator_name):
-    if not hasattr(param_type, operator_name):
-      log.error("%s: %s is not a valid operator for Parameter type %s",
-                self.name, operator_name, param_type.__name__)
-      return False
-
-    if operator_name[:3] not in ['op1', 'op2', 'op3', 'op4', 'opn']:
-      log.error("%s: %s is not a valid operator name (must start with op1, "
-                "op2, op3, op4, or opn) for Parameter type %s",
-                self.name, operator_name, param_type.__name__)
-      return False
-
-    num_parents_required = operator_name[2]
-    if num_parents_required == 'n':
-      return True
-
-    num_parents_required = int(num_parents_required)
-    minimum_number_of_parents = self.minimum_number_of_parents()
-
-    if num_parents_required > minimum_number_of_parents:
-      log.error("%s: %s for Parameter type %s requires more input configs "
-                + "than minimum number of parents, %d, produced by this technique",
-                self.name, operator_name, param_type.__name__, minimum_number_of_parents)
-      return False
-
-    return True
-
-  @classmethod
-  def generate_technique(cls, manipulator=None, *args, **kwargs):
-    """
-    generate a composable technique with random operators
-    """
-    from manipulator import composable_operators
-    # randomly select a composable technique to generate
-    t = cls(*args, **kwargs)
-    if manipulator is None:
-      return t
-
-    paramset = set()
-    for p in manipulator.params:
-      paramset.add(type(p))
-
-    # add some random operator for each param
-    operator_map = {}
-    for param in paramset:
-      operators = composable_operators(param, t.minimum_number_of_parents())
-      # TODO - sometimes use "default" operator (don't choose an operator?
-      # TODO - lower chance of picking op1_nop?
-      ComposableEvolutionaryTechnique.add_to_map(operator_map, param, random.choice(operators))
-
-    t.set_operator_map(operator_map)
-    t.use_default_generated_name()
-    return t
-
-
-class RandomThreeParentsComposableTechnique(ComposableEvolutionaryTechnique):
-  """
-  based on DifferentialEvolution
-  """
-
-  def __init__(self, cr = 0.9, must_mutate_count=1, information_sharing=1, *pargs, **kwargs):
-    super(RandomThreeParentsComposableTechnique, self).__init__(*pargs, **kwargs)
-    self.cr = cr
-    self.must_mutate_count = must_mutate_count
-    self.information_sharing = information_sharing
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['population_size', 'cr', 'must_mutate_count', 'information_sharing']
-
-  def minimum_number_of_parents(self):
-    return 4
-
-  def get_parents(self, population):
-    self.use_f = random.random()
-    population.sort(key=_.timestamp) # sort population by timestamp
-
-    # copy oldest
-    cfg = self.manipulator.copy(population[0].config)
-
-    shuffled_population = map(_.config, population[1:])
-    # mix in the global best configuration
-    shuffled_population += ([self.get_global_best_configuration()]
-                            * self.information_sharing)
-    random.shuffle(shuffled_population)
-
-    # return oldest configuration + 3 other configurations
-    return [cfg] + shuffled_population[0:3]
-
-  def update_population(self, config, population):
-    # replace the oldest configuration if the new one is better.
-    population.sort(key=_.timestamp)
-    if self.lt(config, population[0].config):
-      population[0].config = config
-
-    # mark that oldest configuration is updated
-    population[0].touch()
-
-    return population
-
-  def select_parameters(self, params):
-    """
-    randomly select a subset of parameters to operate on
-    """
-    random.shuffle(params)
-    ret_list = params[:self.must_mutate_count]
-    for param in params[self.must_mutate_count:]:
-      if random.random() < self.cr:
-        ret_list.append(param)
-    return ret_list
-
-  def get_default_operator(self, param_type):
-    return {'op_name': 'op4_set_linear', 'args': [1.0, self.use_f, -self.use_f], 'kwargs': {}}
-
-class GreedyComposableTechnique(ComposableEvolutionaryTechnique):
-  """
-  Always mixes in global best as parents
-  """
-  def __init__(self,
-               mutation_rate = 0.1,
-               must_mutate_count = 1,
-               population_size = 10,
-               *pargs, **kwargs):
-    super(GreedyComposableTechnique, self).__init__(*pargs, **kwargs)
-    self.mutation_rate = mutation_rate
-    self.must_mutate_count = must_mutate_count
-    self.population_size = population_size
-
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['mutation_rate', 'must_mutate_count']
-
-  def minimum_number_of_parents(self):
-    # specify that we will return at least 4 cfgs from get_parents
-    # this maximizes # of operators we can use
-    return 4
-
-  def get_parents(self, population):
-    population.sort(key=_.timestamp) # sort population by timestamp
-
-    # get a 50-50 mix of base and best cfgs as many operators do nothing given identical input cfgs
-    cfg = self.manipulator.copy(population[0].config)
-    # duplicate to get a total of 4 configurations to fulfill the promise in minimum_number_of_parents
-    cfgs = [self.get_global_best_configuration(), cfg]*2
-    # return a random 50-50 mix of the current configuration and global best to pass into operators
-    random.shuffle(cfgs)
-    return cfgs
-
-  def update_population(self, config, population):
-    # replace the oldest configuration if the new one is better.
-    population.sort(key=_.timestamp)
-    if self.lt(config, population[0].config):
-      population[0].config = config
-
-    # mark that oldest configuration is updated
-    population[0].touch()
-
-    return population
-
-  def select_parameters(self, params):
-    random.shuffle(params)
-    ret_list = params[:self.must_mutate_count]
-    for param in params[self.must_mutate_count:]:
-      if random.random() < self.mutation_rate:
-        ret_list.append(param)
-    return ret_list
-
-  def get_default_operator(self, param_type):
-    return {'op_name': 'op1_randomize', 'args': [], 'kwargs':{}}
-
-
-register(RandomThreeParentsComposableTechnique(name='ComposableDiffEvolution',
-                                               population_size=30))
-register_generator(RandomThreeParentsComposableTechnique)
-register_generator(GreedyComposableTechnique)
-
-
-op_map = {}
-ComposableEvolutionaryTechnique.add_to_map(op_map,
-                                           PermutationParameter,
-                                           "op3_cross", xchoice='op3_cross_CX')
-ComposableEvolutionaryTechnique.add_to_map(op_map,
-                                           "FloatArray",
-                                           "op3_cross", strength=0.4)
-register(RandomThreeParentsComposableTechnique(name='ComposableDiffEvolutionCX',
-                                               operator_map=op_map,
-                                               population_size=30))
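[Note] To make the ComposableEvolutionaryTechnique contract concrete, here is a minimal sketch of a subclass providing the three abstract methods; ElitistComposable is illustrative, not a technique shipped in this file. Because it only promises two parents, is_valid_operator will reject op3_/op4_ operators for it, which is why RandomThreeParentsComposableTechnique above promises four.

class ElitistComposable(ComposableEvolutionaryTechnique):
  def minimum_number_of_parents(self):
    # promise: get_parents always returns at least 2 configurations
    return 2

  def get_parents(self, population):
    population.sort(key=lambda m: m.timestamp)
    # copy the oldest member; operators mutate the first parent in place
    base = self.manipulator.copy(population[0].config)
    return [base, self.get_global_best_configuration()]

  def update_population(self, config, population):
    # replace the oldest member if the new configuration beats it
    population.sort(key=lambda m: m.timestamp)
    if self.lt(config, population[0].config):
      population[0].config = config
    population[0].touch()
    return population

register_generator(ElitistComposable)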
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/differentialevolution.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/differentialevolution.py
deleted file mode 100644
index cecffc460c5cdbef184fc244a70f9a6af251bddd..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/differentialevolution.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import random
-import time
-import logging
-from fn import _
-from technique import register
-from technique import SearchTechnique
-
-log = logging.getLogger(__name__)
-log.setLevel(logging.WARNING)
-
-
-class PopulationMember(object):
-  def __init__(self, config, submitted=True):
-    self.config = config
-    self.submitted = submitted
-    self.timestamp = time.time()
-    self.candidate_replacement = None
-
-  def touch(self):
-    self.timestamp = time.time()
-
-
-class DifferentialEvolution(SearchTechnique):
-  """
-  based on http://cci.lbl.gov/cctbx_sources/scitbx/differential_evolution.py
-  """
-
-  def __init__(self,
-               population_size=30,
-               cr=0.9,  # crossover rate
-               n_cross=1,  # force at least 1 to crossover
-               information_sharing=1,  # copies of global best mixed into parent pool
-               duplicate_retries=5,  # how many times to retry on duplicate
-               *pargs, **kwargs):
-
-    self.population_size = population_size
-    self.cr = cr
-    self.n_cross = n_cross
-    self.information_sharing = information_sharing
-    self.population = None
-    self.duplicate_retries = duplicate_retries
-    self.limit = None
-    super(DifferentialEvolution, self).__init__(*pargs, **kwargs)
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['population_size', 'cr', 'n_cross', 'information_sharing']
-
-  def initial_population(self):
-    self.population = [PopulationMember(
-        self.driver.get_configuration(
-            self.manipulator.random()), submitted=False)
-        for z in xrange(self.population_size)]
-
-  def oldest_pop_member(self):
-    # since tests are run in parallel, exclude things with a replacement pending
-    pop_without_replacements = filter(lambda x: x.candidate_replacement is None,
-                                      self.population)
-    if not pop_without_replacements:
-      # everything has a pending replacement
-      return None
-    pop_without_replacements.sort(key=_.timestamp)
-    return pop_without_replacements[0]
-
-  def desired_configuration(self):
-    """
-    Return a cfg that we should test.
-    """
-    if not self.population:
-      # first time called
-      self.initial_population()
-
-    # make sure initial population is completely submitted
-    for p in self.population:
-      if not p.submitted:
-        p.submitted = True
-        if p is self.population[-1]:
-          log.info('initial population testing done')
-        return p.config
-
-    # oldest_pop_member is the member of the population to be replaced
-    oldest_pop_member = self.oldest_pop_member()
-    if not oldest_pop_member:
-      return None
-
-    config = None
-    for retry in xrange(self.duplicate_retries):
-      config = self.driver.get_configuration(
-          self.create_new_configuration(oldest_pop_member))
-      if not self.driver.has_results(config):
-        break
-      # new configuration would have been a duplicate, try again
-
-    oldest_pop_member.touch()  # move to back of the line for next replacement
-    oldest_pop_member.candidate_replacement = config
-    self.limit = self.driver.objective.limit_from_config(
-        oldest_pop_member.config)
-    return oldest_pop_member.candidate_replacement
-
-  def create_new_configuration(self, parent_pop_member):
-    cfg = self.manipulator.copy(parent_pop_member.config.data)
-    cfg_params = self.manipulator.proxy(cfg)
-
-    # pick 3 random parents, excluding the member being replaced
-    shuffled_pop = list(set(self.population) - set([parent_pop_member]))
-
-    # share information with other techniques
-    if self.driver.best_result:
-      shuffled_pop += ([PopulationMember(self.driver.best_result.configuration)]
-                       * self.information_sharing)
-
-    random.shuffle(shuffled_pop)
-    x1, x2, x3 = map(_.config.data, shuffled_pop[0:3])
-
-    use_f = random.random() / 2.0 + 0.5
-
-    params = self.manipulator.param_names(cfg, x1, x2, x3)
-    random.shuffle(params)
-    for i, k in enumerate(params):
-      if i < self.n_cross or random.random() < self.cr:
-        # cfg = x1 + use_f*(x2 - x3)
-        cfg_params[k].op4_set_linear(x1, x2, x3, 1.0, use_f, -use_f)
-
-    return cfg
-
-  def handle_requested_result(self, result):
-    """called when new results are added"""
-    for p in self.population:
-      if p.candidate_replacement == result.configuration:
-        if self.objective.lt(p.candidate_replacement, p.config):
-          # candidate replacement was better, replace it!
-          p.config = p.candidate_replacement
-          log.info('better point')
-        p.candidate_replacement = None
-
-
-class DifferentialEvolutionAlt(DifferentialEvolution):
-  def __init__(self, cr=0.2, **kwargs):
-    kwargs['cr'] = cr
-    super(DifferentialEvolutionAlt, self).__init__(**kwargs)
-
-
-register(DifferentialEvolution())
-register(DifferentialEvolutionAlt())
-register(DifferentialEvolution(population_size=100, cr=0.2,
-                               name='DifferentialEvolution_20_100'))
-
-
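[Note] The arithmetic in create_new_configuration above is the classic DE/rand/1 update. For each parameter name k selected by the crossover loop,

\[
v_k = x_{1,k} + F\,(x_{2,k} - x_{3,k}), \qquad F \sim \mathcal{U}(0.5,\, 1.0),
\]

which is exactly what cfg_params[k].op4_set_linear(x1, x2, x3, 1.0, use_f, -use_f) computes: a linear combination of the three donor configurations with coefficients (1.0, F, -F). The loop mutates at least n_cross parameters and includes each remaining parameter with probability cr.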
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/driver.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/driver.py
deleted file mode 100644
index 7924e36e6fbc772e375ce344e8de919f66e8c6b4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/driver.py
+++ /dev/null
@@ -1,301 +0,0 @@
-import argparse
-import copy
-import logging
-import os
-import sys
-
-from datetime import datetime
-from fn import _
-from opentuner.driverbase import DriverBase
-from opentuner.resultsdb.models import Configuration
-from opentuner.resultsdb.models import DesiredResult
-from opentuner.resultsdb.models import Result
-from opentuner.resultsdb.models import BanditInfo
-from opentuner.resultsdb.models import BanditSubTechnique
-from opentuner.search import plugin
-from opentuner.search import technique
-from opentuner.search.bandittechniques import AUCBanditMetaTechnique
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--test-limit', type=int, default=5000,
-                       help='stop tuning after given tests count')
-argparser.add_argument('--stop-after', type=float,
-                       help='stop tuning after given seconds')
-argparser.add_argument('--parallelism', type=int, default=8,
-                       help='how many tests to support at once')
-argparser.add_argument('--pipelining', type=int, default=0,
-                       help='how long a delay (in generations) before results are available')
-argparser.add_argument('--bail-threshold', type=int, default=5000,
-                       help='abort if no requests have been made in X generations')
-argparser.add_argument('--no-dups', action='store_true',
-                       help='don\'t print out warnings for duplicate requests')
-argparser.add_argument('--seed-configuration', action='append', default=[],
-                       metavar='FILENAME', help="""
-                           Start search at a given configuration.  Can be
-                           specified multiple times.  Configurations are loaded
-                           with ConfigurationManipulator.load_from_file()
-                           and file format is detected from extension.""")
-
-
-class SearchDriver(DriverBase):
-  """
-  controls the search process, managing root_technique and creating
-  DesiredResults
-  """
-
-  def __init__(self, manipulator, extra_seeds=None, extra_criteria=None, **kwargs):
-    super(SearchDriver, self).__init__(**kwargs)
-    if extra_seeds is None:
-      extra_seeds = []
-    self.manipulator = manipulator
-    self.wait_for_results = self.tuning_run_main.results_wait
-    self.commit = self.tuning_run_main.commit
-    self.extra_criteria = extra_criteria
-
-    self.generation = 0
-    self.test_count = 0
-    self.plugins = plugin.get_enabled(self.args)
-    self.pending_result_callbacks = list()  # (DesiredResult, function) tuples
-    # deepcopy is required to have multiple tuning runs in a single process
-    if self.args.list_techniques:
-      techniques, generators = technique.all_techniques()
-      for t in techniques:
-        print t.name
-      sys.exit(0)
-
-    if self.args.generate_bandit_technique:
-      # generate a bandit
-      self.root_technique = AUCBanditMetaTechnique.generate_technique(manipulator)
-    else:
-      self.root_technique = copy.deepcopy(technique.get_root(self.args))
-
-    if isinstance(self.root_technique, AUCBanditMetaTechnique):
-      self.session.flush()
-      info = BanditInfo(tuning_run=self.tuning_run,
-                        c=self.root_technique.bandit.C,
-                        window=self.root_technique.bandit.window,)
-      self.session.add(info)
-      for t in self.root_technique.techniques:
-        subtechnique = BanditSubTechnique(bandit_info=info,
-                                          name=t.name)
-        self.session.add(subtechnique)
-
-    self.objective.set_driver(self)
-    self.pending_config_ids = set()
-    self.best_result = None
-    self.new_results = []
-
-    for t in self.plugins:
-      t.set_driver(self)
-    self.root_technique.set_driver(self)
-    self.seed_cfgs = list(extra_seeds)
-    for cfg_filename in reversed(self.args.seed_configuration):
-      if os.path.exists(cfg_filename):
-        self.seed_cfgs.append(manipulator.load_from_file(cfg_filename))
-      else:
-        log.error('no such file for --seed-configuration %s', cfg_filename)
-
-    self.plugins.sort(key=_.priority)
-
-  def add_plugin(self, p):
-    if p in self.plugins:
-      return
-    self.plugins.append(p)
-    self.plugins.sort(key=_.priority)
-    p.set_driver(self)
-
-  def convergence_criteria(self):
-    """returns true if the tuning process should stop"""
-    if self.args.stop_after:
-      elapsed = (datetime.now() - self.tuning_run.start_date)
-      try:
-        elapsed = elapsed.total_seconds()
-      except:  # python 2.6
-        elapsed = elapsed.days * 86400 + elapsed.seconds
-      if elapsed > self.args.stop_after:
-        return True
-    if self.test_count > self.args.test_limit:
-      return True
-    if self.extra_criteria:
-      if self.extra_criteria(self.new_results):
-        return True
-    return False
-
-  def register_result_callback(self, desired_result, callback):
-    if desired_result.result is not None:
-      callback(desired_result.result)
-    else:
-      self.pending_result_callbacks.append((desired_result, callback))
-
-  def result_callbacks(self):
-    pending = self.pending_result_callbacks
-    self.pending_result_callbacks = list()
-    for dr, callback in pending:
-      if dr.result is not None:
-        callback(dr.result)
-        continue
-      elif self.generation - dr.generation > self.args.pipelining:
-        # see if we can find a result
-        results = self.results_query(config=dr.configuration).all()
-        log.warning("Result callback %d (requestor=%s) pending for "
-                    "%d generations %d results available",
-                    dr.id, dr.requestor, self.generation - dr.generation,
-                    len(results))
-        if len(results):
-          dr.result = results[0]
-          callback(dr.result)
-          continue
-      # try again later
-      self.pending_result_callbacks.append((dr, callback))
-
-  def has_results(self, config):
-    return self.results_query(config=config).count() > 0
-
-  def run_generation_techniques(self):
-    tests_this_generation = 0
-    self.plugin_proxy.before_techniques()
-    for z in xrange(self.args.parallelism):
-      if self.seed_cfgs:
-        config = self.get_configuration(self.seed_cfgs.pop())
-        dr = DesiredResult(configuration=config,
-                           requestor='seed',
-                           generation=self.generation,
-                           request_date=datetime.now(),
-                           tuning_run=self.tuning_run)
-      else:
-        dr = self.root_technique.desired_result()
-      if dr is None or dr is False:
-        log.debug("no desired result, skipping to testing phase")
-        break
-      self.session.flush()  # populate configuration_id
-      duplicates = (self.session.query(DesiredResult)
-                    .filter_by(tuning_run=self.tuning_run,
-                               configuration_id=dr.configuration_id)
-                    .filter(DesiredResult.id != dr.id)
-                    .order_by(DesiredResult.request_date)
-                    .limit(1).all())
-      self.session.add(dr)
-      if len(duplicates):
-        if not self.args.no_dups:
-          log.warning("duplicate configuration request #%d %s/%s %s",
-                      self.test_count,
-                      dr.requestor,
-                      duplicates[0].requestor,
-                      'OLD' if duplicates[0].result else 'PENDING')
-        self.session.flush()
-        desired_result_id = dr.id
-
-        def callback(result):
-          dr = self.session.query(DesiredResult).get(desired_result_id)
-          dr.result = result
-          dr.state = 'COMPLETE'
-          dr.start_date = datetime.now()
-
-        self.register_result_callback(duplicates[0], callback)
-      else:
-        log.debug("desired result id=%d, cfg=%d", dr.id, dr.configuration_id)
-        dr.state = 'REQUESTED'
-      self.test_count += 1
-      tests_this_generation += 1
-    self.plugin_proxy.after_techniques()
-    return tests_this_generation
-
-  def process_new_results(self):
-    self.new_results = []
-    for result in (self.results_query()
-                       .filter_by(was_new_best=None)
-                       .order_by(Result.collection_date)):
-      self.plugin_proxy.on_result(result)
-      self.new_results.append(result)
-      if self.best_result is None:
-        self.best_result = result
-        result.was_new_best = True
-      elif self.objective.lt(result, self.best_result):
-        self.best_result = result
-        result.was_new_best = True
-        self.plugin_proxy.on_new_best_result(result)
-      else:
-        result.was_new_best = False
-    self.result_callbacks()
-
-  def run_generation_results(self, offset=0):
-    self.commit()
-    self.plugin_proxy.before_results_wait()
-    self.wait_for_results(self.generation + offset)
-    self.plugin_proxy.after_results_wait()
-    self.process_new_results()
-
-  @property
-  def plugin_proxy(self):
-    """
-    forward any method calls on the returned object to all plugins
-    """
-    plugins = self.plugins
-
-    class PluginProxy(object):
-      def __getattr__(self, method_name):
-        def plugin_method_proxy(*args, **kwargs):
-          rv = []
-          for plugin in plugins:
-            rv.append(getattr(plugin, method_name)(*args, **kwargs))
-          return filter(lambda x: x is not None, rv)
-
-        return plugin_method_proxy
-
-    return PluginProxy()
-
-  def get_configuration(self, cfg):
-    """called by SearchTechniques to create Configuration objects"""
-    self.manipulator.normalize(cfg)
-    hashv = self.manipulator.hash_config(cfg)
-    config = Configuration.get(self.session, self.program, hashv, cfg)
-    return config
-
-  def main(self):
-    self.plugin_proxy.set_driver(self)
-    self.plugin_proxy.before_main()
-
-    no_tests_generations = 0
-
-    # prime pipeline with tests
-    for z in xrange(self.args.pipelining):
-      self.run_generation_techniques()
-      self.generation += 1
-
-    while not self.convergence_criteria():
-      if self.run_generation_techniques() > 0:
-        no_tests_generations = 0
-      elif no_tests_generations <= self.args.bail_threshold:
-        no_tests_generations += 1
-      else:
-        break
-      self.run_generation_results(offset=-self.args.pipelining)
-      self.generation += 1
-
-    self.plugin_proxy.after_main()
-
-  def external_main_begin(self):
-    self.plugin_proxy.set_driver(self)
-    self.plugin_proxy.before_main()
-
-  def external_main_generation(self):
-    if self.generation > 0:
-      self.plugin_proxy.after_results_wait()
-    self.process_new_results()
-    self.run_generation_techniques()
-    self.commit()
-    self.plugin_proxy.before_results_wait()
-
-  def external_main_end(self):
-    self.plugin_proxy.after_main()
-
-
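[Note] One consequence of convergence_criteria above: besides --stop-after and --test-limit, a caller can stop the search programmatically through the extra_criteria hook, which receives the list of Results that became available in the current generation. A hypothetical sketch (stop_when_accurate and driver_kwargs are illustrative names, not part of opentuner):

def stop_when_accurate(new_results):
  # stop as soon as any result of this generation crosses the threshold
  return any(r.accuracy is not None and r.accuracy > 0.99
             for r in new_results)

driver = SearchDriver(manipulator, extra_criteria=stop_when_accurate,
                      **driver_kwargs)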
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/evolutionarytechniques.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/evolutionarytechniques.py
deleted file mode 100644
index e663ac1345cfbd0823df2231fc3e8040298059f9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/evolutionarytechniques.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import abc
-import copy
-import random
-from technique import SearchTechnique
-from opentuner.search import technique
-
-class EvolutionaryTechnique(SearchTechnique):
-  def __init__(self,
-               mutation_rate = 0.1,
-               crossover_rate = 0.0,
-               must_mutate_count = 1,
-               *pargs, **kwargs):
-    super(EvolutionaryTechnique, self).__init__(*pargs, **kwargs)
-    self.mutation_rate = mutation_rate
-    self.crossover_rate = crossover_rate
-    self.must_mutate_count = must_mutate_count
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['mutation_rate', 'crossover_rate', 'must_mutate_count']
-
-
-  def desired_configuration(self):
-    """
-    return a (cfg, priority) that we should test,
-    through random mutation and crossover
-    """
-    #TODO: set limit value
-
-    parents = self.selection()
-    parents = map(copy.deepcopy, parents)
-    parent_hashes = map(self.manipulator.hash_config, parents)
-
-    if len(parents) > 1:
-      cfg = self.crossover(parents)
-    else:
-      cfg = parents[0]
-
-    for z in xrange(10): #retries
-      self.mutation(cfg)
-      if self.manipulator.hash_config(cfg) in parent_hashes:
-        continue # try again
-      return cfg
-
-  def mutation(self, cfg):
-    """
-    mutate cfg in place
-    """
-    params = self.manipulator.parameters(cfg)
-    random.shuffle(params)
-    for param in params[:self.must_mutate_count]:
-      self.mutate_param(cfg, param)
-    for param in params[self.must_mutate_count:]:
-      if random.random() < self.mutation_rate:
-        self.mutate_param(cfg, param)
-
-  def mutate_param(self, cfg, param):
-    """
-    mutate single parameter of cfg in place
-    """
-    param.op1_randomize(cfg)
-
-  def crossover(self):
-    raise NotImplementedError()
-
-  def selection(self):
-    """return a list of parent configurations to use"""
-    if random.random() < self.crossover_rate:
-      return [self.select(),
-              self.select()]
-    else:
-      return [self.select()]
-
-  @abc.abstractmethod
-  def select(self):
-    """return a single random parent configuration"""
-    return None
-
-class GreedySelectionMixin(object):
-  """
-  EvolutionaryTechnique mixin for greedily selecting the best known
-  configuration
-  """
-  def select(self):
-    """greedily return the best known configuration (or a random one if none)"""
-    if (self.driver.best_result is not None and
-        self.driver.best_result.state == 'OK'):
-      return self.driver.best_result.configuration.data
-    else:
-      return self.manipulator.random()
-
-class NormalMutationMixin(object):
-  """
-  Mutate primitive parameters according to normal distribution
-  """
-
-  def __init__(self, sigma = 0.1, *pargs, **kwargs):
-    super(NormalMutationMixin, self).__init__(*pargs, **kwargs)
-    self.sigma = sigma
-
-  def mutate_param(self, cfg, param):
-    """
-    mutate single parameter of cfg in place
-    """
-    if param.is_primitive():
-      param.op1_normal_mutation(cfg, self.sigma)
-    else:
-      random.choice(param.manipulators(cfg))(cfg)
-
-
-class CrossoverMixin(object):
-  def __init__(self, crossover, *pargs, **kwargs):
-    super(CrossoverMixin, self).__init__(*pargs, **kwargs)
-    self.crossover_op = crossover
-    self.name = 'ga-' + crossover.replace("op3_cross_", "")
-
-  def crossover(self, cfgs):
-    """
-    Crossover each large permutation parameter (size > 6) of two parents and
-    return one offspring cfg
-    """
-    cfg1, cfg2 = cfgs
-    new = self.manipulator.copy(cfg1)
-    params = self.manipulator.parameters(cfg1)
-    for param in params:
-      if param.is_permutation() and param.size > 6:
-        getattr(param, self.crossover_op)(new, cfg1, cfg2, d=param.size / 3)
-    return new
-
-
-class UniformGreedyMutation(GreedySelectionMixin, EvolutionaryTechnique):
-  pass
-
-class NormalGreedyMutation(NormalMutationMixin, GreedySelectionMixin, EvolutionaryTechnique):
-  pass
-
-class GA(CrossoverMixin, UniformGreedyMutation):
-  pass
-
-technique.register(GA(crossover='op3_cross_OX3', mutation_rate=0.10, crossover_rate=0.8))
-technique.register(GA(crossover='op3_cross_OX1', mutation_rate=0.10, crossover_rate=0.8))
-technique.register(GA(crossover='op3_cross_PX', mutation_rate=0.10, crossover_rate=0.8))
-technique.register(GA(crossover='op3_cross_CX', mutation_rate=0.10, crossover_rate=0.8))
-technique.register(GA(crossover='op3_cross_PMX', mutation_rate=0.10, crossover_rate=0.8))
-technique.register(UniformGreedyMutation(name='ga-base', mutation_rate=0.10))
-
-technique.register(UniformGreedyMutation(name='UniformGreedyMutation05', mutation_rate=0.05))
-technique.register(UniformGreedyMutation(name='UniformGreedyMutation10', mutation_rate=0.10))
-technique.register(UniformGreedyMutation(name='UniformGreedyMutation20', mutation_rate=0.20))
-technique.register(NormalGreedyMutation(name='NormalGreedyMutation05', mutation_rate=0.05))
-technique.register(NormalGreedyMutation(name='NormalGreedyMutation10', mutation_rate=0.10))
-technique.register(NormalGreedyMutation(name='NormalGreedyMutation20', mutation_rate=0.20))
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/globalGA.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/globalGA.py
deleted file mode 100644
index e9b1f711746bbd42d0fb6e7ca3972d467c703e66..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/globalGA.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import abc
-import copy
-import random
-from technique import SearchTechnique
-from opentuner.search import technique
-
-class GlobalEvolutionaryTechnique(SearchTechnique):
-  def __init__(self,
-               mutation_rate = 0.1,
-               crossover_rate = 0.0,
-               must_mutate_count = 1,
-               crossover_strength = 0.1,
-               *pargs, **kwargs):
-    super(GlobalEvolutionaryTechnique, self).__init__(*pargs, **kwargs)
-    self.mutation_rate = mutation_rate
-    self.crossover_rate = crossover_rate
-    self.must_mutate_count = must_mutate_count
-    self.crossover_strength = crossover_strength
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['mutation_rate', 'crossover_rate', 'must_mutate_count', 'crossover_strength']
-
-  def desired_configuration(self):
-    """
-    return a (cfg, priority) that we should test,
-    through random mutation and crossover
-    """
-    #TODO: set limit value
-
-    parents = self.selection()
-    parents = map(copy.deepcopy, parents)
-    parent_hashes = map(self.manipulator.hash_config, parents)
-
-    if len(parents) > 1:
-      cfg = self.crossover(parents)
-    else:
-      cfg = parents[0]
-
-    for z in xrange(10): #retries
-      self.mutation(cfg)
-      if self.manipulator.hash_config(cfg) in parent_hashes:
-        continue # try again
-      return cfg
-
-  def mutation(self, cfg):
-    """
-    mutate cfg in place
-    """
-    params = self.manipulator.parameters(cfg)
-    random.shuffle(params)
-    for param in params[:self.must_mutate_count]:
-      self.mutate_param(cfg, param)
-    for param in params[self.must_mutate_count:]:
-      if random.random() < self.mutation_rate:
-        self.mutate_param(cfg, param)
-
-  def mutate_param(self, cfg, param):
-    """
-    mutate single parameter of cfg in place
-    """
-    param.op1_randomize(cfg)
-
-  def crossover(self, cfgs):
-    cfg1, cfg2 = cfgs
-    new = self.manipulator.copy(cfg1)
-    params = self.manipulator.parameters(cfg1)
-    random.shuffle(params)
-    d = int(self.crossover_strength*len(params))
-    for param in params[:d]:
-      param.set_value(new, param.get_value(cfg2))
-    return new
-
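For illustration, the uniform crossover above can be sketched standalone on a plain dict config (the dict layout and the function name here are hypothetical, not OpenTuner API):

    import random

    def uniform_crossover(cfg1, cfg2, crossover_strength=0.1):
        # copy the first parent, then overwrite a random fraction
        # (crossover_strength) of its parameters with the second parent's values
        new = dict(cfg1)
        params = list(new.keys())
        random.shuffle(params)
        for name in params[:int(crossover_strength * len(params))]:
            new[name] = cfg2[name]
        return new

With crossover_strength=0.2 and ten parameters, two randomly chosen parameter values are copied in from the second parent.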
-  def selection(self):
-    """return a list of parent configurations to use"""
-    if random.random() < self.crossover_rate:
-      return [self.select(),
-              self.select()]
-    else:
-      return [self.select()]
-
-  @abc.abstractmethod
-  def select(self):
-    """return a single random parent configuration"""
-    return None
-
-class GreedySelectionMixin(object):
-  """
-  EvolutionaryTechnique mixin for greedily selecting the best known
-  configuration
-  """
-  def select(self):
-    """return a single random parent configuration"""
-    if (self.driver.best_result is not None and
-        self.driver.best_result.state == 'OK'):
-      return self.driver.best_result.configuration.data
-    else:
-      return self.manipulator.random()
-
-class NormalMutationMixin(object):
-  """
-  Mutate primitive parameters according to normal distribution
-  """
-
-  def __init__(self, sigma = 0.1, *pargs, **kwargs):
-    super(NormalMutationMixin, self).__init__(*pargs, **kwargs)
-    self.sigma = sigma
-
-  def mutate_param(self, cfg, param):
-    """
-    mutate single parameter of cfg in place
-    """
-    if param.is_primitive():
-      param.op1_normal_mutation(cfg, self.sigma)
-    else:
-      random.choice(param.manipulators(cfg))(cfg)
-
-
-class UniformGreedyMutation(GreedySelectionMixin, GlobalEvolutionaryTechnique):
-  pass
-
-class NormalGreedyMutation(NormalMutationMixin, GreedySelectionMixin, GlobalEvolutionaryTechnique):
-  pass
-
-technique.register(NormalGreedyMutation(crossover_rate=0.5, crossover_strength=0.2, name='GGA'))
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/manipulator.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/manipulator.py
deleted file mode 100755
index decd476bf37ec2c12d2578b9b8266e5f8c705b12..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/manipulator.py
+++ /dev/null
@@ -1,1853 +0,0 @@
-# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab autoindent smarttab
-import abc
-import collections
-import copy
-import hashlib
-import json
-import logging
-import math
-import os
-import pickle
-import random
-from fn import _
-import argparse
-from datetime import datetime
-import numpy
-import inspect
-import sys
-
-log = logging.getLogger(__name__)
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--list-params', '-lp',
-                       help='list available parameter classes')
-
-
-class ConfigurationManipulatorBase(object):
-  """
-  abstract interface for objects used by search techniques to mutate
-  configurations
-  """
-  __metaclass__ = abc.ABCMeta
-
-  # List of file formats, which can be extended by subclasses. Used in
-  # write_to_file() and load_from_file().  Objects in list must define
-  # load(fd) and dump(cfg, fd).
-  FILE_FORMATS = {'default': json, 'json': json,
-                  'pickle': pickle, 'pk': pickle}
-
-  def validate(self, config):
-    """is the given config valid???"""
-    return all(map(_.validate(config), self.parameters(config)))
-
-  def normalize(self, config):
-    """mutate config into canonical form"""
-    for param in self.parameters(config):
-      param.normalize(config)
-
-  def set_search_driver(self, search_driver):
-    """called exactly once during setup"""
-    pass
-
-  def copy(self, config):
-    """produce copy of config"""
-    return copy.deepcopy(config)
-
-  def parameters_dict(self, config):
-    """convert self.parameters() to a dictionary by name"""
-    return dict([(p.name, p) for p in self.parameters(config)])
-
-  def param_names(self, *args):
-    """return union of parameter names in args"""
-    return sorted(reduce(set.union,
-                         [set(map(_.name, self.parameters(cfg)))
-                          for cfg in args]))
-
-  def linear_config(self, a, cfg_a, b, cfg_b, c, cfg_c):
-    """return a configuration that is a linear combination of 3 other configs"""
-    dst = self.copy(cfg_a)
-    dst_params = self.proxy(dst)
-    for k in self.param_names(dst, cfg_a, cfg_b, cfg_c):
-      dst_params[k].op4_set_linear(cfg_a, cfg_b, cfg_c, a, b, c)
-    return dst
-
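A worked example of the three-way linear combination, on bare floats rather than Parameter objects (the real op4_set_linear additionally works on unit-scaled values and clamps):

    # a=1, b=1, c=-1 yields cfg_a + (cfg_b - cfg_c), the reflection step
    # used by simplex-style techniques
    a, b, c = 1.0, 1.0, -1.0
    cfg_a, cfg_b, cfg_c = {'x': 0.2}, {'x': 0.9}, {'x': 0.5}
    dst = dict((k, a * cfg_a[k] + b * cfg_b[k] + c * cfg_c[k]) for k in cfg_a)
    assert abs(dst['x'] - 0.6) < 1e-9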
-  def _get_serializer(self, filename, format=None):
-    """
-    Extract the correct file format serializer from self.FILE_FORMATS.
-    Guess the format by extension if one is not given.
-    """
-    if format is None:
-      format = os.path.splitext(filename)[1].lower().replace('.', '')
-    if format not in self.FILE_FORMATS:
-      serializer = self.FILE_FORMATS['default']
-      if len(self.FILE_FORMATS) > 1:
-        log.warning('Unknown file format "%s", using "%s" instead', format,
-                    serializer.__name__)
-    else:
-      serializer = self.FILE_FORMATS[format]
-    return serializer
-
-  def save_to_file(self, cfg, filename, format=None):
-    """
-    Write cfg to filename.  Guess the format by extension if one is not given.
-    """
-    with open(filename, 'a+') as fd:
-      self._get_serializer(filename, format).dump(cfg, fd)
-
-  def load_from_file(self, filename, format=None):
-    """
-    Read cfg from filename.  Guess the format by extension if one is not given.
-    """
-    with open(filename, 'rb') as fd:
-      return self._get_serializer(filename, format).load(fd)
-
-  def proxy(self, cfg):
-    return ManipulatorProxy(self, cfg)
-
-  @abc.abstractmethod
-  def random(self):
-    """produce a random initial configuration"""
-    return
-
-  @abc.abstractmethod
-  def parameters(self, config):
-    """return a list of of Parameter objects"""
-    return list()
-
-  @abc.abstractmethod
-  def hash_config(self, config):
-    """produce unique hash value for the given config"""
-    return
-
-
-class ConfigurationManipulator(ConfigurationManipulatorBase):
-  """
-  a configuration manipulator using a fixed set of parameters and storing
-  configs in a dict-like object
-  """
-
-  def __init__(self, params=None, config_type=dict, seed_config=None, **kwargs):
-    if params is None:
-      params = []
-    self.params = list(params)
-    self.config_type = config_type
-    self.search_driver = None
-    self._seed_config = seed_config
-    super(ConfigurationManipulator, self).__init__(**kwargs)
-    for p in self.params:
-      p.parent = self
-
-  def add_parameter(self, p):
-    p.set_parent(self)
-    self.params.append(p)
-
-    #TODO sub parameters should be recursed on
-    # not currently an issue since no doubly-nested sub-parameters
-    sub_params = p.sub_parameters()
-    for sp in sub_params:
-      sp.set_parent(p)
-    self.params.extend(sub_params)
-
-  def set_search_driver(self, search_driver):
-    self.search_driver = search_driver
-
-  def seed_config(self):
-    """produce a fixed seed configuration"""
-    if self._seed_config:
-      cfg = copy.deepcopy(self._seed_config)
-    else:
-      cfg = self.config_type()
-      for p in self.params:
-        if not isinstance(p.name, str) or '/' not in p.name:
-          cfg[p.name] = p.seed_value()
-    return cfg
-
-  def random(self):
-    """produce a random configuration"""
-    cfg = self.seed_config()
-    for p in self.parameters(cfg):
-      p.op1_randomize(cfg)
-    return cfg
-
-  def parameters(self, config):
-    """return a list of Parameter objects"""
-    if type(config) is not self.config_type:
-      log.error("wrong type, expected %s got %s",
-                str(self.config_type),
-                str(type(config)))
-      raise TypeError()
-    return self.params
-
-  def parameters_to_json(self):
-    """
-    output information about the parameters in this manipulator in json format:
-    [ConfigurationManipulator,{pinfo:count,pinfo:count ...}]
-    where pinfo has a similar form to describe the parameter's sub-parameters:
-    [param_name,{pinfo:count,pinfo:count ...}]
-    """
-    def param_info_to_json(param, sub_parameters):
-      """
-      recursively output information about a parameter and its subparameters in a json format:
-
-      [parameter_name, {subparam_info:count,subparam_info:count,...}]
-      or if no subparams
-      [parameter_name,{}]
-
-      where subparam_info are sorted alphabetically. Note we can't directly use json since
-      sets/dictionaries aren't always ordered by key
-      """
-      sub_parameter_counts = {}
-      # build the string
-      if isinstance(param, str):
-        param_name = param
-      else:
-        param_name = param.__class__.__name__
-      out = ['[', param_name, ',{']
-
-      if len(sub_parameters) > 0:
-        # count sub params
-        for sp in sub_parameters:
-          spout = param_info_to_json(sp, sp.sub_parameters())
-          sub_parameter_counts[spout] = sub_parameter_counts.get(spout, 0) + 1
-        # add the count map in sorted order
-        for sp in sorted(sub_parameter_counts):
-          out.append(sp)
-          out.append(':')
-          out.append(str(sub_parameter_counts[sp]))
-          out.append(',')
-        out.pop() # remove trailing comma
-
-      out.append('}]')
-      return ''.join(out)
-
-    # filter out subparameters to avoid double counting
-    params = [p for p in self.params if p.parent is self]
-    return param_info_to_json(self, params)
-
-  def hash_config(self, config):
-    """produce unique hash value for the given config"""
-    m = hashlib.sha256()
-    params = list(self.parameters(config))
-    params.sort(key=_.name)
-    for i, p in enumerate(params):
-      m.update(str(p.name))
-      m.update(p.hash_value(config))
-      m.update(str(i))
-      m.update("|")
-    return m.hexdigest()
-
-  def search_space_size(self):
-    """estimate the size of the search space, not precise"""
-    return reduce(_ * _, [x.search_space_size() for x in self.params])
-
-  def difference(self, cfg1, cfg2):
-    cfg = self.copy(cfg1)
-    for param in self.parameters(cfg1):
-      if param.is_primitive(cfg1):
-        # TODO: check range
-        param.set_value(cfg, param.get_value(cfg1) - param.get_value(cfg2))
-      else:
-        pass
-    return cfg
-
-  def applySVs(self, cfg, sv_map, args, kwargs):
-    """
-    Apply operators to each parameter according to given map. Updates cfg.
-    Parameters with no operators specified are not updated.
-    cfg: configuration data
-    sv_map: python dict that maps string parameter name to class method name
-    args, kwargs: python dicts that map string parameter name to the
-    positional and keyword arguments for that method
-    """
-    # TODO: check consistency between sv_map and cfg
-    param_dict = self.parameters_dict(cfg)
-    for pname in self.param_names(cfg):
-      param = param_dict[pname]
-      getattr(param, sv_map[pname])(cfg, *args[pname], **kwargs[pname])
-
-
-class Parameter(object):
-  """
-  abstract base class for parameters in a ConfigurationManipulator
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def __init__(self, name):
-    self.name = name
-    self.parent = None
-    super(Parameter, self).__init__()
-
-  def _to_storage_type(self, val):
-    """hook to support transformation applied while stored"""
-    return val
-
-  def _from_storage_type(self, sval):
-    """hook to support transformation applied while stored"""
-    return sval
-
-  def _read_node(self, config):
-    """hook to support different storage structures"""
-    node = config
-    if not isinstance(self.name, str):
-      return node, self.name
-    name_parts = self.name.split('/')
-    for part in name_parts[:-1]:
-      if isinstance(node, list):
-        part = int(part)
-      node = node[part]
-    part = name_parts[-1]
-    if isinstance(node, list):
-      part = int(part)
-    return node, part
-
-  def _get(self, config):
-    """hook to support different storage structures"""
-    node, part = self._read_node(config)
-    return self._from_storage_type(node[part])
-
-  def _set(self, config, v):
-    """hook to support different storage structures"""
-    node, part = self._read_node(config)
-    node[part] = self._to_storage_type(v)
-
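A sketch of how '/'-separated parameter names address nested configs, mirroring _read_node above (the config shape is hypothetical):

    config = {'layers': [{'width': 64}, {'width': 128}]}
    node, parts = config, 'layers/1/width'.split('/')
    for part in parts[:-1]:
        if isinstance(node, list):
            part = int(part)      # numeric path segments index into lists
        node = node[part]
    leaf = parts[-1]
    if isinstance(node, list):
        leaf = int(leaf)
    assert node[leaf] == 128      # _get reads node[leaf]; _set assigns to it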
-  def set_parent(self, manipulator):
-    self.parent = manipulator
-
-  def validate(self, config):
-    """is the given config valid???"""
-    return True
-
-  def is_primitive(self, ignored=None):
-    return isinstance(self, PrimitiveParameter)
-
-  def is_permutation(self, ignored=None):
-    return isinstance(self, PermutationParameter)
-
-  def manipulators(self, config):
-    """
-    a list of manipulator functions to change this value in the config
-    manipulators must be functions that take a config and change it in place
-
-    default implementation just has op1_randomize as only operation
-    """
-    return [self.op1_randomize]
-
-  def normalize(self, config):
-    """
-    mutate this parameter into a canonical form
-    """
-    pass
-
-  def sub_parameters(self):
-    """
-    additional parameters added with this parameter
-    """
-    return []
-
-  @abc.abstractmethod
-  def op1_randomize(self, cfg):
-    """
-    Set this parameter's value in a configuration to a random value
-
-    :param config: the configuration to be changed
-    """
-    pass
-
-  @abc.abstractmethod
-  def seed_value(self):
-    """some legal value of this parameter (for creating initial configs)"""
-    return
-
-  @abc.abstractmethod
-  def copy_value(self, src, dst):
-    """copy the value of this parameter from src to dst config"""
-    pass
-
-  @abc.abstractmethod
-  def same_value(self, cfg1, cfg2):
-    """test if cfg1 and cfg2 have the same value of this parameter"""
-    return
-
-  @abc.abstractmethod
-  def hash_value(self, config):
-    """produce unique hash for this value in the config"""
-    return
-
-  @abc.abstractmethod
-  def op4_set_linear(self, cfg, cfg_a, cfg_b, cfg_c, a, b, c):
-    """
-    Sets the parameter value in a configuration to a linear combination of 3
-    other configurations: :math:`a*cfg_a + b*cfg_b + c*cfg_c`
-
-    :param cfg: the configuration to be changed
-    :param cfg_a: a parent configuration
-    :param cfg_b: a parent configuration
-    :param cfg_c: a parent configuration
-    :param a: weight for cfg_a
-    :param b: weight for cfg_b
-    :param c: weight for cfg_c
-    """
-    pass
-
-  def search_space_size(self):
-    return 1
-
-  def op1_nop(self, cfg):
-    """
-    The 'null' operator. Does nothing.
-
-    :param cfg: the configuration to be changed
-    """
-    pass
-
-  # Stochastic variators
-  def op3_swarm(self, cfg, cfg1, cfg2, c, c1, c2, *args, **kwargs):
-    """
-    Stochastically 'move' the parameter value in a configuration towards those
-    in two parent configurations. This is done by calling :py:meth:`opn_stochastic_mix`
-
-    :param cfg: the configuration to be changed
-    :param cfg1: a parent configuration
-    :param cfg2: a parent configuration
-    :param c: weight of original configuration
-    :param c1: weight for cfg1
-    :param c2: weight for cfg2
-    """
-    # default to probabilistic treatment
-    self.opn_stochastic_mix(cfg, [cfg, cfg1, cfg2], [c, c1, c2])
-
-  def opn_stochastic_mix(self, cfg, cfgs, ratio, *args, **kwargs):
-    """
-    Stochastically recombine a list of parent values into a single result.
-
-    This randomly copies a value from a list of parents configurations according
-    to a list of weights.
-
-    :param cfg: the configuration to be changed
-    :param cfgs: a list of parent configurations
-    :param ratio: a list of floats representing the weight of each configuration
-     in cfgs
-
-    """
-    assert len(cfgs) == len(ratio)
-    r = random.random()
-    c = numpy.array(ratio, dtype=float) / sum(ratio)
-    for i in range(len(c)):
-      if r < sum(c[:i + 1]):
-        self.copy_value(cfg, cfgs[i])
-        break
-
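A worked example of the weighted pick in opn_stochastic_mix, with the copy_value call replaced by recording which parent was chosen:

    import random

    cfgs = ['parent0', 'parent1', 'parent2']
    ratio = [1.0, 1.0, 2.0]              # parent2 is chosen ~50% of the time
    weights = [w / sum(ratio) for w in ratio]
    r = random.random()
    for i in range(len(weights)):
        if r < sum(weights[:i + 1]):     # walk the cumulative distribution
            chosen = cfgs[i]             # the real code copies this parent's value
            break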
-
-class PrimitiveParameter(Parameter):
-  """
-  An abstract interface implemented by parameters that represent a single
-  dimension in a cartesian space in a legal range
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def __init__(self, name, value_type=float, **kwargs):
-    self.value_type = value_type
-    super(PrimitiveParameter, self).__init__(name, **kwargs)
-
-  def hash_value(self, config):
-    """produce unique hash for this value in the config"""
-    self.normalize(config)
-    return hashlib.sha256(repr(self.get_value(config))).hexdigest()
-
-  def copy_value(self, src, dst):
-    """copy the value of this parameter from src to dst config"""
-    self.set_value(dst, self.get_value(src))
-
-  def same_value(self, cfg1, cfg2):
-    """test if cfg1 and cfg2 have the same value of this parameter"""
-    return self.get_value(cfg1) == self.get_value(cfg2)
-
-  def is_integer_type(self):
-    """true if self.value_type can only represent integers"""
-    return self.value_type(0) == self.value_type(0.1)
-
-  def get_unit_value(self, config):
-    """get_value scaled such that range is between 0.0 and 1.0"""
-    low, high = self.legal_range(config)
-    if self.is_integer_type():
-      # account for rounding
-      low -= 0.4999
-      high += 0.4999
-    val = self.get_value(config)
-    if low < high:
-      return float(val - low) / float(high - low)
-    else:
-      if low > high:
-        log.warning('invalid range for parameter %s, %s to %s',
-                    self.name, low, high)
-      # only a single legal value!
-      return 0.0
-
-  def set_unit_value(self, config, unit_value):
-    """set_value scaled such that range is between 0.0 and 1.0"""
-    assert 0.0 <= unit_value <= 1.0
-    low, high = self.legal_range(config)
-    if self.is_integer_type():
-      # account for rounding
-      low -= 0.4999
-      high += 0.4999
-    if low < high:
-      val = unit_value * float(high - low) + low
-      if self.is_integer_type():
-        val = round(val)
-      val = max(low, min(val, high))
-      self.set_value(config, self.value_type(val))
-
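A round-trip example of the unit-value scaling for an integer parameter over [1, 4]; widening the bounds by 0.4999 gives each integer an (almost) equal-width bucket on the unit scale:

    low, high = 1 - 0.4999, 4 + 0.4999

    def to_unit(val):
        return (val - low) / (high - low)

    def from_unit(u):
        return int(max(1, min(round(u * (high - low) + low), 4)))

    assert from_unit(to_unit(3)) == 3
    assert from_unit(0.0) == 1 and from_unit(1.0) == 4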
-  def op1_normal_mutation(self, cfg, sigma=0.1, *args, **kwargs):
-    """
-    apply normally distributed noise to this parameter's value in a
-    configuration
-
-    :param cfg: The configuration to be changed
-    :param sigma: the std. deviation of the normally distributed noise on a unit
-     scale
-    """
-    v = self.get_unit_value(cfg)
-    v += random.normalvariate(0.0, sigma)
-    # handle boundary cases by reflecting off the edge
-    if v < 0.0:
-      v *= -1.0
-    if v > 1.0:
-      v = 1.0 - (v % 1)
-    self.set_unit_value(cfg, v)
-
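The boundary handling above reflects off the edges rather than clipping, which would pile probability mass onto the endpoints; numerically:

    def reflect(v):
        if v < 0.0:
            v *= -1.0            # bounce off 0
        if v > 1.0:
            v = 1.0 - (v % 1)    # bounce off 1
        return v

    assert reflect(-0.2) == 0.2
    assert abs(reflect(1.3) - 0.7) < 1e-9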
-  def op4_set_linear(self, cfg, cfg_a, cfg_b, cfg_c, a, b, c):
-    """
-    set the parameter value in a configuration to a linear combination of 3
-    other configurations: :math:`a*cfg_a + b*cfg_b + c*cfg_c`
-
-    :param cfg: The configuration to be changed
-    :param cfg_a: a parent configuration
-    :param cfg_b: a parent configuration
-    :param cfg_c: a parent configuration
-    :param a: weight for cfg_a
-    :param b: weight for cfg_b
-    :param c: weight for cfg_c
-    """
-    va = self.get_unit_value(cfg_a)
-    vb = self.get_unit_value(cfg_b)
-    vc = self.get_unit_value(cfg_c)
-    v = a * va + b * vb + c * vc
-    v = max(0.0, min(v, 1.0))
-
-    self.set_unit_value(cfg, v)
-
-  def manipulators(self, config):
-    """
-    a list of manipulator functions to change this value in the config
-    manipulators must be functions that take a config and change it in place
-
-    for primitive params default implementation is uniform random and normal
-    """
-    return [self.op1_randomize, self.op1_normal_mutation]
-
-  @abc.abstractmethod
-  def set_value(self, config, value):
-    """assign this value in the given configuration"""
-    pass
-
-  @abc.abstractmethod
-  def get_value(self, config):
-    """retrieve this value from the given configuration"""
-    return 0
-
-  @abc.abstractmethod
-  def legal_range(self, config):
-    """return the legal range for this parameter, inclusive"""
-    return 0, 1
-
-
-class NumericParameter(PrimitiveParameter):
-  """
-  A parameter representing a number with a minimum and maximum value
-  """
-  def __init__(self, name, min_value, max_value, **kwargs):
-    """min/max are inclusive"""
-    assert min_value <= max_value
-    super(NumericParameter, self).__init__(name, **kwargs)
-    # after super call so self.value_type is initialized
-    self.min_value = self.value_type(min_value)
-    self.max_value = self.value_type(max_value)
-
-  def seed_value(self):
-    """some legal value of this parameter (for creating initial configs)"""
-    return self.min_value
-
-  def set_value(self, config, value):
-    assert value >= self.min_value
-    assert value <= self.max_value
-    self._set(config, value)
-
-  def get_value(self, config):
-    return self._get(config)
-
-  def legal_range(self, config):
-    return self.min_value, self.max_value
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration to a random value in its legal
-     range
-
-    :param config: the configuration to be changed
-    """
-    if self.is_integer_type():
-      self.set_value(config, random.randint(*self.legal_range(config)))
-    else:
-      self.set_value(config, random.uniform(*self.legal_range(config)))
-
-  def op1_scale(self, cfg, k):
-    """
-    Scale this parameter's value in a configuration by a constant factor
-
-    :param cfg: the configuration to be changed
-    :param k: the constant factor to scale the parameter value by
-    """
-    v = self.get_value(cfg) * k
-    v = max(self.min_value, min(self.max_value, v))
-    self.set_value(cfg, v)
-
-  def op3_difference(self, cfg, cfg1, cfg2):
-    """
-    Set this parameter's value in a configuration to the difference between this
-    parameter's values in 2 other configs (cfg2 - cfg1)
-
-    :param cfg: the configuration to be changed
-    :param cfg1: The configuration whose parameter value is being subtracted
-    :param cfg2: The configuration whose parameter value is subtracted from
-    """
-    v = self.get_value(cfg2) - self.get_value(cfg1)
-    v = max(self.min_value, min(self.max_value, v))
-    self.set_value(cfg, v)
-
-  def opn_sum(self, cfg, *cfgs):
-    """
-    Set this parameter's value in a configuration to the sum of its values in a
-     list of configurations
-
-    :param cfg: the configuration to be changed
-    :param cfgs: a list of configurations to sum
-    """
-    v = sum([self.get_value(c) for c in cfgs])
-    v = max(self.min_value, min(self.max_value, v))
-    self.set_value(cfg, v)
-
-  def search_space_size(self):
-    if self.value_type is float:
-      return 2 ** 32
-    else:
-      return self.max_value - self.min_value + 1  # inclusive range
-
-
-class IntegerParameter(NumericParameter):
-  """
-  A parameter representing an integer value in a legal range
-  """
-  def __init__(self, name, min_value, max_value, **kwargs):
-    """min/max are inclusive"""
-    kwargs['value_type'] = int
-    super(IntegerParameter, self).__init__(name, min_value, max_value, **kwargs)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, c=1, c1=0.5,
-                c2=0.5, velocity=0, sigma=0.2, *args, **kwargs):
-    """
-    Simulates a single update step in particle swarm optimization by updating
-    the current position and returning a new velocity.
-
-    The new velocity is given by
-
-    .. math:: c*velocity + r1*c1*(cfg1-cfg) + r2*c2*(cfg2-cfg)
-
-    where r1 and r2 are random values between 0 and 1.
-
-    The new current position is the new velocity with gaussian noise added.
-
-    :param cfg: the configuration to be changed. Represents the current position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param c: the weight of the current velocity
-    :param c1: weight of cfg1
-    :param c2: weight of cfg2
-    :param velocity: the old velocity
-    :param sigma: standard deviation of the gaussian noise, on a unit-scale
-    :return: the new velocity, a float
-
-    """
-    vmin, vmax = self.legal_range(cfg)
-    k = vmax - vmin
-    # calculate the new velocity
-    v = velocity * c + (self.get_value(cfg1) - self.get_value(
-        cfg)) * c1 * random.random() + (self.get_value(
-        cfg2) - self.get_value(cfg)) * c2 * random.random()
-    # Map velocity to continuous space with sigmoid
-    s = k / (1 + numpy.exp(-v)) + vmin
-    # Add Gaussian noise
-    p = random.gauss(s, sigma * k)
-    # Discretize and bound
-    p = int(min(vmax, max(round(p), vmin)))
-    self.set_value(cfg, p)
-    return v
-
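A self-contained trace of one discrete swarm step, following the recipe above (velocity update, sigmoid squash into the legal range, gaussian jitter, then round and clamp); the concrete numbers are illustrative:

    import math
    import random

    vmin, vmax = 0, 10
    cur, local_best, global_best = 2, 7, 9
    c, c1, c2, velocity, sigma = 1.0, 0.5, 0.5, 0.0, 0.2

    v = (velocity * c
         + (local_best - cur) * c1 * random.random()
         + (global_best - cur) * c2 * random.random())
    k = vmax - vmin
    s = k / (1.0 + math.exp(-v)) + vmin       # sigmoid maps v into [vmin, vmax]
    p = random.gauss(s, sigma * k)            # exploration noise
    p = int(min(vmax, max(round(p), vmin)))   # discretize and bound
    assert vmin <= p <= vmax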
-
-class FloatParameter(NumericParameter):
-  def __init__(self, name, min_value, max_value, **kwargs):
-    """min/max are inclusive"""
-    kwargs['value_type'] = float
-    super(FloatParameter, self).__init__(name, min_value, max_value, **kwargs)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, c=1, c1=0.5,
-                c2=0.5, velocity=0, *args, **kwargs):
-    """
-
-    Simulates a single update step in particle swarm optimization by updating
-    the current position and returning a new velocity.
-
-    The new velocity is given by
-
-    .. math:: c*velocity + r1*c1*(cfg1-cfg) + r2*c2*(cfg2-cfg)
-
-    where r1 and r2 are random values between 0 and 1
-
-    The new current position is the old current position offset by the new
-    velocity:
-
-    :param cfg: the configuration to be changed. Represents the current position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param c: the weight of the current velocity
-    :param c1: weight of cfg1
-    :param c2: weight of cfg2
-    :param velocity: the old velocity
-    :return: the new velocity, a float
-
-    """
-    vmin, vmax = self.legal_range(cfg)
-    v = velocity * c + (self.get_value(cfg1) - self.get_value(
-        cfg)) * c1 * random.random() + (self.get_value(
-        cfg2) - self.get_value(cfg)) * c2 * random.random()
-    p = self.get_value(cfg) + v
-    p = min(vmax, max(p, vmin))
-    self.set_value(cfg, p)
-    return v
-
-
-class ScaledNumericParameter(NumericParameter):
-  """
-  A Parameter that is stored in configurations normally, but has a scaled
-  value when accessed using 'get_value'.
-  Because search techniques interact with Parameters through get_value, these
-  parameters are searched on a different scale (e.g. log scale).
-  """
-
-  @abc.abstractmethod
-  def _scale(self, v):
-    """
-    called on a value when getting it from its configuration. Transforms the
-    actual value to the scale it is searched on
-    """
-    return v
-
-  @abc.abstractmethod
-  def _unscale(self, v):
-    """
-    called on a value when storing it. Transforms a value from its search scale
-    to its actual value
-    """
-    return v
-
-  def set_value(self, config, value):
-    NumericParameter.set_value(self, config, self._unscale(value))
-
-  def get_value(self, config):
-    return self._scale(NumericParameter.get_value(self, config))
-
-  def legal_range(self, config):
-    return map(self._scale, NumericParameter.legal_range(self, config))
-
-
-class LogIntegerParameter(ScaledNumericParameter, FloatParameter):
-  """
-  an integer value that is searched on a log scale, but stored without scaling
-  """
-
-  def _scale(self, v):
-    return math.log(v + 1.0 - self.min_value, 2.0)
-
-  def _unscale(self, v):
-    v = 2.0 ** v - 1.0 + self.min_value
-    v = int(round(v))
-    return v
-
-  def legal_range(self, config):
-    low, high = NumericParameter.legal_range(self, config)
-    # increase the bounds to account for rounding
-    return self._scale(low - 0.4999), self._scale(high + 0.4999)
-
-
-class LogFloatParameter(ScaledNumericParameter, FloatParameter):
-  """
-  a float parameter that is searched on a log scale, but stored without scaling
-  """
-
-  def _scale(self, v):
-    return math.log(v + 1.0 - self.min_value, 2.0)
-
-  def _unscale(self, v):
-    v = 2.0 ** v - 1.0 + self.min_value
-    return v
-
-
-class PowerOfTwoParameter(ScaledNumericParameter, IntegerParameter):
-  """
-  An integer power of two, with a min and max value. Searched by the exponent
-  """
-
-  def __init__(self, name, min_value, max_value, **kwargs):
-    kwargs['value_type'] = int
-    assert min_value >= 1
-    assert math.log(min_value, 2) % 1 == 0  # must be power of 2
-    assert math.log(max_value, 2) % 1 == 0  # must be power of 2
-    super(PowerOfTwoParameter, self).__init__(name, min_value, max_value,
-                                              **kwargs)
-
-  def _scale(self, v):
-    return int(math.log(v, 2))
-
-  def _unscale(self, v):
-    return 2 ** int(v)
-
-  def legal_range(self, config):
-    return int(math.log(self.min_value, 2)), int(math.log(self.max_value, 2))
-
-  def search_space_size(self):
-    return int(math.log(super(PowerOfTwoParameter, self).search_space_size(), 2))
-
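Concretely, a PowerOfTwoParameter over [1, 1024] is searched as the eleven integer exponents 0..10; a small numeric check (using round as a guard against floating-point log wobble):

    import math

    min_value, max_value = 1, 1024
    lo = int(round(math.log(min_value, 2)))   # 0
    hi = int(round(math.log(max_value, 2)))   # 10
    assert (lo, hi) == (0, 10)                # the exponent-scale legal_range
    assert 2 ** 7 == 128                      # _unscale: exponent 7 -> value 128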
-
-##################
-
-class ComplexParameter(Parameter):
-  """
-  A non-cartesian parameter that can't be manipulated directly, but has a set
-  of user defined manipulation functions
-  """
-
-  def copy_value(self, src, dst):
-    """copy the value of this parameter from src to dst config"""
-    self._set(dst, copy.deepcopy(self._get(src)))
-
-  def same_value(self, cfg1, cfg2):
-    """test if cfg1 and cfg2 have the same value of this parameter"""
-    return self._get(cfg1) == self._get(cfg2)
-
-  def hash_value(self, config):
-    """produce unique hash for this value in the config"""
-    self.normalize(config)
-    return hashlib.sha256(repr(self._get(config))).hexdigest()
-
-  def get_value(self, config):
-    return self._get(config)
-
-  def set_value(self, config, value):
-    self._set(config, value)
-
-  def op4_set_linear(self, cfg, cfg_a, cfg_b, cfg_c, a, b, c):
-    """
-    set this value to :math:`a*cfg_a + b*cfg_b + c*cfg_c`
-
-    this operation is not possible in general with complex parameters but
-    we make an attempt to "fake" it for common use cases
-
-    this reduces to a call to op1_randomize unless, after normalization,
-    a == 1.0 and b == -c; in that case cfg becomes a copy of cfg_a,
-    re-randomized only if cfg_b and cfg_c differ
-
-    :param cfg: the configuration to be changed
-    :param cfg_a: a parent configuration
-    :param cfg_b: a parent configuration
-    :param cfg_c: a parent configuration
-    :param a: weight for cfg_a
-    :param b: weight for cfg_b
-    :param c: weight for cfg_c
-    """
-    # attempt to normalize order, we prefer a==1.0
-    if a != 1.0 and b == 1.0:  # swap a and b
-      a, cfg_a, b, cfg_b = b, cfg_b, a, cfg_a
-    if a != 1.0 and c == 1.0:  # swap a and c
-      a, cfg_a, c, cfg_c = c, cfg_c, a, cfg_a
-
-    # attempt to normalize order, we prefer b==-c
-    if b < c:  # swap b and c
-      b, cfg_b, c, cfg_c = c, cfg_c, b, cfg_b
-    if b != -c and a == -c:  # swap a and c
-      a, cfg_a, c, cfg_c = c, cfg_c, a, cfg_a
-
-    if a == 1.0 and b == -c:
-      self.copy_value(cfg_a, cfg)
-      self.add_difference(cfg, b, cfg_b, cfg_c)  # TODO inline this logic?
-    else:
-      # TODO: should handle more cases
-      self.op1_randomize(cfg)
-
-  def add_difference(self, cfg_dst, scale, cfg_b, cfg_c):
-    """
-    add the difference cfg_b-cfg_c to cfg_dst
-
-    this is the key operation used in differential evolution
-    and some simplex techniques
-
-    this operation is not possible in general with complex parameters but
-    we make an attempt to "fake" it
-    """
-    if not self.same_value(cfg_b, cfg_c):
-      self.op1_randomize(cfg_dst)
-
-  @abc.abstractmethod
-  def op1_randomize(self, config):
-    """
-    randomize this value without taking into account the current position
-    :param config: the configuration to be changed
-    """
-    pass
-
-  @abc.abstractmethod
-  def seed_value(self):
-    """some legal value of this parameter (for creating initial configs)"""
-    return
-
-
-class BooleanParameter(ComplexParameter):
-  def manipulators(self, config):
-    return [self.op1_flip]
-
-  def get_value(self, config):
-    return self._get(config)
-
-  def set_value(self, config, value):
-    self._set(config, value)
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration randomly
-
-    :param config: the configuration to be changed
-    """
-    self._set(config, self.seed_value())
-
-  def seed_value(self):
-    return random.choice((True, False))
-
-  def op1_flip(self, config):
-    """
-    Flip this parameter's value in a configuration
-
-    :param config: the configuration to be changed
-    """
-    self._set(config, not self._get(config))
-
-  def search_space_size(self):
-    return 2
-
-  def op3_swarm(self, cfg, cfg1, cfg2, c=1, c1=0.5,
-                c2=0.5, velocity=0, *args, **kwargs):
-    """
-    Simulates a single update step in particle swarm optimization by updating
-    the current position and returning a new velocity.
-
-    The new velocity is given by
-
-    .. math:: c*velocity + r1*c1*(cfg1-cfg) + r2*c2*(cfg2-cfg)
-
-    where r1 and r2 are random values between 0 and 1
-
-    The new current position is randomly chosen based on the new velocity
-
-    :param cfg: the configuration to be changed. Represents the current position
-    :param cfg1: a configuration to shift towards. Should be the local best position
-    :param cfg2: a configuration to shift towards. Should be the global best position
-    :param c: the weight of the current velocity
-    :param c1: weight of cfg1
-    :param c2: weight of cfg2
-    :param velocity: the old velocity
-    :param args:
-    :param kwargs:
-    :return: the new velocity, a float
-
-    """
-    v = velocity * c + (self.get_value(cfg1) - self.get_value(
-        cfg)) * c1 * random.random() + (self.get_value(
-        cfg2) - self.get_value(cfg)) * c2 * random.random()
-    # Map velocity to continuous space with sigmoid
-    s = 1 / (1 + numpy.exp(-v))
-    # Decide position randomly
-    p = (s - random.random()) > 0
-    self.set_value(cfg, p)
-    return v
-
-
-class SwitchParameter(ComplexParameter):
-  """
-  A parameter representing an unordered collection of options with no implied
-  correlation between the choices. The choices are range(option_count)
-  """
-
-  def __init__(self, name, option_count):
-    self.option_count = option_count
-    super(SwitchParameter, self).__init__(name)
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration to a random value
-
-    :param config: the configuration to be changed
-    """
-    self._set(config, random.randrange(self.option_count))
-
-  def seed_value(self):
-    return random.randrange(self.option_count)
-
-  def search_space_size(self):
-    return max(1, self.option_count)
-
-
-class EnumParameter(ComplexParameter):
-  """
-  same as a SwitchParameter but choices are taken from an arbitrarily typed list
-  """
-
-  def __init__(self, name, options):
-    super(EnumParameter, self).__init__(name)
-    self.options = list(options)
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration to a random value
-
-    :param config: the configuration to be changed
-    """
-    self._set(config, random.choice(self.options))
-
-  def seed_value(self):
-    return random.choice(self.options)
-
-  def search_space_size(self):
-    return max(1, len(self.options))
-
-
-class PermutationParameter(ComplexParameter):
-  """
-  A parameter representing a permutation (or ordering) as a list of items
-  """
-  def __init__(self, name, items):
-    super(PermutationParameter, self).__init__(name)
-    self._items = list(items)
-    self.size = len(items)
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration to a random value
-
-    :param config: the configuration to be changed
-    """
-    random.shuffle(self._get(config))
-    self.normalize(config)
-
-  def op1_small_random_change(self, config, p=0.25):
-    """
-    Iterates through the list and probabilistically swaps each element with the
-    next element
-
-    :param p: probability of swapping an element with the next element
-    :param config: the configuration to be changed
-    """
-    cfg_item = self._get(config)
-    for i in xrange(1, len(cfg_item)):
-      if random.random() < p:
-        # swap
-        cfg_item[i - 1], cfg_item[i] = cfg_item[i], cfg_item[i - 1]
-    self.normalize(config)
-
-  def seed_value(self):
-    return list(self._items)  # copy
-
-  def manipulators(self, config):
-    return [self.op1_randomize, self.op1_small_random_change]
-
-  def get_value(self, config):
-    return self._get(config)
-
-  def set_value(self, config, value):
-    self._set(config, value)
-
-  def search_space_size(self):
-    return math.factorial(max(1, len(self._items)))
-
-  def op3_cross(self, cfg, cfg1, cfg2, xchoice='op3_cross_OX1', strength=0.3,
-                *args, **kwargs):
-    """
-    Calls the crossover operator specified by xchoice
-    Passes argument d = strength*(size of the permutation)
-
-    :param cfg: the configuration to be changed
-    :param cfg1: a parent configuration
-    :param cfg2: a parent configuration
-    :param xchoice: string specifying which crossover operator to use (should start with op3_cross prefix)
-    :param strength: the strength of the crossover
-    """
-    dd = int(round(self.size * strength))
-    if dd < 1:
-      log.warning('Crossover length too small. Cannot create new solution.')
-    if dd >= self.size:
-      log.warning('Crossover length too big. Cannot create new solution.')
-    getattr(self, xchoice)(cfg, cfg1, cfg2, d=dd, *args, **kwargs)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, xchoice='op3_cross_OX1', c=0.5,
-                c1=0.5, c2=0.5, strength=0.3, velocity=0, *args, **kwargs):
-    """
-    Replacement for particle swarm optimization iterative step for permutations.
-    Given a target cfg and 2 parent cfgs, probabilistically performs an
-    op3_cross with one of the 2 parents.
-
-    :param cfg: the configuration to be changed. Represents the current position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param xchoice: which crossover operator should be used
-    :param c: the probability of not performing a crossover
-    :param c1: the probability of performing a crossover with cfg1 (if a
-     crossover is performed)
-    :param c2: unused
-    :param strength: the strength of the crossover
-    :param velocity: the old velocity - unused
-    """
-    if random.uniform(0, 1) > c:
-      if random.uniform(0, 1) < c1:
-        # Select crossover operator
-        self.op3_cross(cfg, cfg, cfg1, xchoice, strength)
-      else:
-        self.op3_cross(cfg, cfg, cfg2, xchoice, strength)
-
-  # swap-based operators
-  def op2_random_swap(self, cfg, cfg1, *args, **kwargs):
-    """
-    Swap a random pair of items in cfg1 and save the result into cfg
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the configuration whose PermutationParameter's elements are
-     swapped and copied into cfg
-    """
-    p = self.get_value(cfg1)[:]
-    r = random.randint(0, len(p) - 1)
-    s = random.randint(0, len(p) - 1)
-    v1 = p[r]
-    v2 = p[s]
-    p[r] = v2
-    p[s] = v1
-    self.set_value(cfg, p)
-
-  def op2_random_invert(self, cfg, cfg1, strength=0.3, *args, **kwargs):
-    """
-    Reverse the ordering of a random subsection of size d in cfg1 and save the
-    result in cfg where d = strength*total-size
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the configuration whose PermutationParameter is inverted
-    :param strength: the size of the reversed subsection as a fraction of the
-     total size
-    """
-    p = self.get_value(cfg1)[:]
-    d = int(round(len(p) * strength))
-    r = random.randint(0, len(p) - d)
-    subpath = p[r:r + d][:]
-    subpath.reverse()
-    p[r:r + d] = subpath
-    self.set_value(cfg, p)
-
-  # Crossover operators
-  def op3_cross_PX(self, cfg, cfg1, cfg2, d=0):
-    """
-    Partition crossover (Whitley 2009?)
-
-    Chooses a random cut point and reorders elements in cfg1 up to the cut point
-    according to their order in cfg2.
-
-    Saves the result in cfg
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the first parent configuration. The "base" configuration
-    :param cfg2: the second parent configuration. Is "crossed into" cfg1
-    :param d: unused
-    """
-    p1 = self.get_value(cfg1)
-    p2 = self.get_value(cfg2)
-    c1 = random.randint(2, len(p1))
-    self.set_value(cfg, sorted(p1[:c1], key=lambda x: p2.index(x)) + p1[c1:])
-
-  def op3_cross_PMX(self, cfg, cfg1, cfg2, d=0):
-    """
-    Partially-mapped crossover (Goldberg & Lingle, 1985)
-
-    Replaces a random section of cfg1 with the corresponding section in cfg2.
-    Displaced elements in cfg1 are moved to the old position of the elements
-    displacing them
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the first parent configuration. The "base" configuration
-    :param cfg2: the second parent configuration. Is "crossed into" cfg1
-    :param d: the size of the crossover
-    """
-    if d == 0:
-      d = max(1, int(round(self.size * 0.3)))  # default to 30% of the permutation size
-    p1 = self.get_value(cfg1)[:]
-    p2 = self.get_value(cfg2)[:]
-
-    r = random.randint(0, len(p1) - d)
-
-    c1 = p1[r:r + d]
-    c2 = p2[r:r + d]
-
-    # get new permutation by crossing over a section of p2 onto p1
-    pnew = self.get_value(cfg1)[:]
-    pnew[r:r + d] = c2
-    # fix conflicts by taking displaced elements in crossed over section
-    # displaced = (elements x in c1 where x does not have corresponding value in c2)
-    # and putting them where the value that displaced them was
-
-    #candidates for displacement
-    candidate_indices = set(range(r) + range(r+d, len(p1)))
-    # Check through displaced elements to find values to swap conflicts to
-    while c1 != []:
-      n = c1[0]
-      #try to match up a value in c1 to the equivalent value in c2
-      while c2[0] in c1:
-        if n == c2[0]:
-          # already match up
-          break
-        # find position idx of c2[0] in c1
-        link_idx = c1.index(c2[0])
-        # get value of c2 at idx
-        link = c2[link_idx]
-        # remove c2[idx] and c1[idx] since they match up when we swap c2[0] with c2[idx] (this avoids an infinite loop)
-        del c2[link_idx]
-        del c1[link_idx]
-        # swap new value into c2[0]
-        c2[0] = link
-
-      if n != c2[0]:
-        # first check if we can swap in the crossed over section still
-        if n in c2:
-          c2[c2.index(n)] = c2[0]
-        else:
-          # assign first instance of c2[0] outside of the crossed over section in pnew to c1[0]
-          for idx in candidate_indices:
-            if pnew[idx] == c2[0]:
-              pnew[idx] = c1[0]
-              candidate_indices.remove(idx) # make sure we don't override this value now
-              break
-      # remove first elements
-      del c1[0]
-      del c2[0]
-    self.set_value(cfg, pnew)
-
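A hand-worked instance of standard PMX, which the conflict-resolution loop above implements (0-based indices, crossing the slice [2:5)):

    p1 = [1, 2, 3, 4, 5, 6, 7]
    p2 = [3, 7, 5, 1, 6, 2, 4]
    # copy p2's section into p1: pnew = [1, 2, 5, 1, 6, 6, 7]  (not yet valid)
    # the section mapping 5<->3, 1<->4, 6<->5 repairs the duplicates:
    # position 0 (value 1) -> 4; position 5 (value 6) -> 5 -> chained on to 3
    offspring = [4, 2, 5, 1, 6, 3, 7]
    assert sorted(offspring) == sorted(p1)   # a valid permutation again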
-  def op3_cross_CX(self, cfg, cfg1, cfg2, d=0):
-    """
-    Implementation of a cyclic crossover.
-
-    Repeatedly replaces elements of cfg1 with the element at the same index in
-    cfg2. This is done until a cycle is reached and cfg1 is valid again. The
-    initial replacement is random.
-
-    Saves the result in cfg.
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the first parent configuration. The "base" configuration
-    :param cfg2: the second parent configuration. Is "crossed into" cfg1
-    :param d: unused
-    """
-    p1 = self.get_value(cfg1)
-    p2 = self.get_value(cfg2)
-    p = p1[:]
-
-    s = random.randint(0, len(p1) - 1)
-    i = s
-    indices = set()
-
-    while len(indices) < len(p1): # should never exceed this
-      indices.add(i)
-      val = p1[i]
-      i = p2.index(val)
-      # deal with duplicate values
-      while i in indices:
-        if i == s:
-          break
-        i = p2[i+1:].index(val) + i + 1
-      if i == s:
-        break
-
-    for j in indices:
-      p[j] = p2[j]
-
-    self.set_value(cfg, p)
-
-  def op3_cross_OX1(self, cfg, cfg1, cfg2, d=0):
-    """
-    Ordered Crossover (Davis 1985)
-
-    Exchanges a subpath from cfg2 into cfg1 while maintaining the order of the
-    remaining elements in cfg1.
-
-    Saves the result in cfg.
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the first parent configuration. The "base" configuration
-    :param cfg2: the second parent configuration. Is "crossed into" cfg1
-    :param d: size of the exchanged subpath
-    """
-    if d == 0:
-      d = max(1, int(round(self.size * 0.3)))  # default to 30% of the permutation size
-    p1 = self.get_value(cfg1)
-    p2 = self.get_value(cfg2)
-    c1 = p1[:]
-    # Randomly find cut point
-    # TODO: treat path as a circle, i.e. allow cross-boundary cuts
-    r = random.randint(0, len(p1) - d)
-    # drop the exchanged values from c1, keeping the remaining order
-    for i in p2[r:r + d]:
-      c1.remove(i)
-    self.set_value(cfg, c1[:r] + p2[r:r + d] + c1[r:])
-
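A small worked OX1 example: the subpath [4, 3] is taken from p2 and the remaining elements keep their order from p1:

    p1 = [1, 2, 3, 4, 5]
    p2 = [5, 4, 3, 2, 1]
    r, d = 1, 2                                   # cut point and subpath length
    c1 = [x for x in p1 if x not in p2[r:r + d]]  # p1 without the subpath values
    child = c1[:r] + p2[r:r + d] + c1[r:]
    assert child == [1, 4, 3, 2, 5]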
-  def op3_cross_OX3(self, cfg, cfg1, cfg2, d=0):
-    """
-    Ordered crossover variation 3 (Deep 2010)
-
-    Same as op3_cross_OX1, except the parents have different cut points for
-    their subpaths
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the first parent configuration. The "base" configuration
-    :param cfg2: the second parent configuration. Is "crossed into" cfg1
-    :param d: size of the exchanged subpath
-    """
-    if d == 0:
-      d = max(1, int(round(self.size * 0.3)))  # default to 30% of the permutation size
-    p1 = self.get_value(cfg1)
-    p2 = self.get_value(cfg2)
-    c1 = p1[:]
-    # Randomly find cut points
-    # TODO: treat path as a circle, i.e. allow cross-boundary cuts
-    r1 = random.randint(0, len(p1) - d)
-    r2 = random.randint(0, len(p1) - d)
-    for i in p2[r2:r2 + d]:
-      c1.remove(i)
-    self.set_value(cfg, c1[:r1] + p2[r2:r2 + d] + c1[r1:])
-
-
-class ScheduleParameter(PermutationParameter):
-  def __init__(self, name, items, deps):
-    super(ScheduleParameter, self).__init__(name, items)
-    self.deps = dict((k, set(v)) for k, v in deps.items())
-    log.debug("ScheduleParameter(%s, %s, %s)", repr(name), repr(items),
-              repr(deps))
-    self._expand_deps()
-
-  def _expand_deps(self):
-    """expand self.deps to include recursive dependencies"""
-    fixed_point = False
-    while not fixed_point:
-      fixed_point = True
-      for k in self.deps.keys():
-        oldlen = len(self.deps[k])
-        for dep in list(self.deps[k]):
-          if dep in self.deps:
-            self.deps[k].update(self.deps[dep])
-        if oldlen != len(self.deps[k]):
-          fixed_point = False
-
-    # verify schedule is valid
-    items = set(self._items)
-    for k, v in self.deps.items():
-      if k in v:
-        raise Exception("ScheduleParameter('%s') cycle: %s depends on itself" %
-                        (self.name, k))
-
-      if v - items:
-        raise Exception("ScheduleParameter('%s'): %s is unknown" %
-                        (self.name, v - items))
-
-    if set(self.deps.keys()) - items:
-      raise Exception("ScheduleParameter('%s'): %s is unknown" %
-                      (self.name, set(self.deps.keys()) - items))
-
-  def is_topologically_sorted(self, values):
-    used = set()
-    for v in values:
-      # note: this used .union(used), which is truthy whenever either set is
-      # non-empty; in the canonical (dependents-first) order produced below,
-      # none of v's dependencies may appear before v, hence the intersection
-      if v in self.deps and self.deps[v] & used:
-        return False
-      used.add(v)
-    return True
-
-  def topologically_sorted_depth_first(self, values):
-    """faster but not stable enough"""
-    if self.is_topologically_sorted(values):
-      return values
-    sorted_values = []
-    used = set()
-    deps = dict((k, sorted(v, key=values.index, reverse=True))
-                for k, v in self.deps.items())
-
-    def visit(v):
-      if v in used:
-        return
-      if v in deps:
-        for dv in deps[v]:
-          visit(dv)
-      used.add(v)
-      sorted_values.append(v)
-
-    for v in reversed(values):
-      visit(v)
-    return list(reversed(sorted_values))
-
-  def topologically_sorted(self, values):
-    if self.is_topologically_sorted(values):
-      return values
-    deps = copy.deepcopy(self.deps)
-    queue = collections.deque(reversed(values))
-    sorted_values = []
-    while queue:
-      v = queue.popleft()
-      if v in deps and deps[v]:
-        queue.append(v)
-      else:
-        for k, d in deps.items():
-          d.discard(v)
-          if not d:
-            del deps[k]
-        sorted_values.append(v)
-
-    return list(reversed(sorted_values))
-
-  def normalize(self, cfg):
-    self._set(cfg, self.topologically_sorted(self._get(cfg)))
-
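A standalone trace of the queue-based repair above: an item is emitted only after all of its dependencies, and the final reversal puts dependents first (the canonical order installed by normalize):

    import collections

    deps = {'b': set(['a']), 'c': set(['a', 'b'])}   # already expanded
    queue = collections.deque(reversed(['a', 'b', 'c']))
    out = []
    while queue:
        v = queue.popleft()
        if v in deps and deps[v]:
            queue.append(v)                  # dependencies still pending
        else:
            for k in list(deps):
                deps[k].discard(v)
                if not deps[k]:
                    del deps[k]
            out.append(v)
    assert list(reversed(out)) == ['c', 'b', 'a']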
-
-class SelectorParameter(ComplexParameter):
-  def __init__(self, name, choices, max_cutoff,
-               order_class=PermutationParameter,
-               offset_class=LogIntegerParameter):
-    super(SelectorParameter, self).__init__(name)
-    self.choices = choices
-    self.max_cutoff = max_cutoff
-    self.order_param = order_class('{0}/order'.format(name), choices)
-    self.offset_params = [
-        offset_class('{0}/offsets/{1}'.format(name, i), 0, max_cutoff)
-        for i in xrange(len(choices) - 1)]
-
-  def sub_parameters(self):
-    return [self.order_param] + self.offset_params
-
-  def seed_value(self):
-    return {'order': self.order_param.seed_value(),
-            'offsets': [co.seed_value() for co in self.offset_params]}
-
-  def op1_randomize(self, config):
-    random.choice(self.sub_parameters()).op1_randomize(config)
-
-  def selector_iter(self, config):
-    """
-    yield (cutoff, choice) pairs
-    cutoff will be None on the first value
-    """
-    order = config[self.name]['order']
-    yield (None, order[0])
-    cutoff = 0
-    for n, offset in enumerate(config[self.name]['offsets']):
-      if offset > 0:
-        cutoff += offset
-        yield cutoff, order[n + 1]
-
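For example, with two of three choices active, selector_iter yields cutoff/choice pairs as follows (the value layout matches seed_value above):

    value = {'order': ['a', 'b', 'c'], 'offsets': [10, 0]}
    pairs, cutoff = [(None, value['order'][0])], 0
    for n, offset in enumerate(value['offsets']):
        if offset > 0:
            cutoff += offset
            pairs.append((cutoff, value['order'][n + 1]))
    assert pairs == [(None, 'a'), (10, 'b')]    # 'c' dropped: zero offset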
-
-class ParameterArray(ComplexParameter):
-  """
-  Represents an array of Parameters
-  """
-  def __init__(self, name, count, element_type, *args, **kwargs):
-    super(ParameterArray, self).__init__(name)
-    self.count = count
-
-    # each extra positional/keyword argument is a sequence with one entry per
-    # element (the old kwargs[i], with an integer key, could never match a
-    # string keyword name)
-    self.sub_params = [
-        element_type('{0}/{1}'.format(name, i),
-                     *[a[i] for a in args],
-                     **dict((k, v[i]) for k, v in kwargs.items()))
-        for i in xrange(count)]
-
-  def sub_parameters(self):
-    return self.sub_params
-
-  def seed_value(self):
-    return [p.seed_value() for p in self.sub_params]
-
-  def op1_randomize(self, config):
-    """
-    randomly selects a sub-parameter and randomizes it
-
-    :param config: the configuration to be changed
-    """
-    random.choice(self.sub_parameters()).op1_randomize(config)
-
-
-class BooleanParameterArray(ParameterArray):
-  """
-  Represents an array of BooleanParameters - currently unimplemented
-  """
-  def __init__(self, name, count):
-    super(BooleanParameterArray, self).__init__(name, count, BooleanParameter)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, *args, **kwargs):
-    # TODO
-    pass
-
-  def op3_cross(self, cfg, cfg1, cfg2, *args, **kwargs):
-    # TODO
-    pass
-
-
-class IntegerParameterArray(ParameterArray):
-  """
-  Represents an array of IntegerParameters - currently unimplemented
-  """
-  def __init__(self, name, min_values, max_values):
-    assert len(min_values) == len(max_values)
-    super(IntegerParameterArray, self).__init__(name, len(min_values),
-                                                IntegerParameter,
-                                                min_value=min_values,
-                                                max_value=max_values)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, *args, **kwargs):
-    # TODO
-    pass
-
-  def op3_cross(self, cfg, cfg1, cfg2, *args, **kwargs):
-    # TODO
-    pass
-
-
-class Array(ComplexParameter):
-  """
-  An interface for parameters representing an array of values.
-  """
-  # TODO: constraints? (upper & lower bound etc)
-  def __init__(self, name, size):
-    super(Array, self).__init__(name)
-    self.size = size
-
-  def op3_cross(self, cfg, cfg1, cfg2, strength=0.3, *args, **kwargs):
-    """
-    Crosses two arrays by replacing a random subsection of cfg1 with the
-    corresponding subsection of cfg2. The size of the chunk is a fixed fraction
-    of the total length, given by the strength
-
-    Behaves like a specialized 2-point crossover, where the first cut point is
-    random and the second cut is a set distance after.
-
-    :param cfg: the configuration to be changed
-    :param cfg1: the configuration being inserted into
-    :param cfg2: the configuration being inserted
-    :param strength: the size of the crossover, as a fraction of total array
-     length
-    """
-    d = int(round(self.size * strength))
-    if d < 1:
-      log.debug('Crossover length too small. Cannot create new solution.')
-      return
-    if d >= self.size:
-      log.debug('Crossover length too big. Cannot create new solution.')
-      return
-    p1 = self.get_value(cfg1)
-    p2 = self.get_value(cfg2)
-    # TODO: treat path as circle, i.e. allow cross-boundary cuts
-    r = random.randint(0, len(p1) - d)
-    p = numpy.concatenate([p1[:r], p2[r:r + d], p1[r + d:]])
-    self.set_value(cfg, p)
-
-  def op3_swarm(self, cfg, cfg1, cfg2, c=1, c1=0.5,
-                c2=0.5, velocity=0, strength=0.3, *args, **kwargs):
-    """
-    Replacement for a particle swarm optimization iterative step for arrays.
-    Given a target cfg and 2 parent cfgs, probabilistically performs an
-    :py:meth:`op3_cross` with one of the 2 parents.
-
-    :param cfg: the configuration to be changed. Represents the cfg position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param c: the probability of not performing a crossover
-    :param c1: the probability of performing a crossover with cfg1 (if a
-     crossover is performed)
-    :param c2: unused
-    :param velocity: the old velocity - unused
-    :param strength: the strength of the crossover
-    """
-    if random.uniform(0, 1) > c:
-      if random.uniform(0, 1) < c1:
-        # Select crossover operator
-        self.op3_cross(cfg, cfg, cfg1, strength)
-      else:
-        self.op3_cross(cfg, cfg, cfg2, strength)
-
-  def get_value(self, config):
-    return self._get(config)
-
-  def set_value(self, config, value):
-    self._set(config, value)
-
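-# A standalone sketch of the chunk-replacement crossover op3_cross performs,
-# using plain numpy arrays in place of configurations (illustrative only):
-#
-#   import numpy
-#   p1 = numpy.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
-#   p2 = numpy.array([9, 8, 7, 6, 5, 4, 3, 2, 1, 0])
-#   d = int(round(len(p1) * 0.3))        # chunk size from strength=0.3 -> 3
-#   r = 4                                # randomly chosen first cut point
-#   numpy.concatenate([p1[:r], p2[r:r + d], p1[r + d:]])
-#   # -> array([0, 1, 2, 3, 5, 4, 3, 7, 8, 9])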
-
-class BooleanArray(Array):
-  """
-  Represents an array of boolean values which are either 0 or 1
-  """
-  def op3_swarm_parallel(self, cfg, cfg1, cfg2, c=1,
-                         c1=0.5, c2=0.5, velocities=0):
-    """
-    Simulates a single particle swarm optimization step for each element in the
-    array by updating each position and returning an array of new velocities.
-
-    The new velocities are given by
-
-    .. math:: c*velocity + r1*c1*(cfg1-cfg) + r2*c2*(cfg2-cfg)
-
-    where r1 and r2 are random values between 0 and 1. In each iteration, r1 and
-    r2 are constant across array elements
-
-    The new cfg positions are randomly chosen based on the new velocities
-
-    :param cfg: the configuration to be changed. This represents the current
-     position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param c: the weight of the current velocities
-    :param c1: weight of cfg1
-    :param c2: weight of cfg2
-    :param velocities: the current velocities
-    :return: a numpy array of new velocities
-    """
-    vs = (velocities * c
-          + (self.get_value(cfg1) - self.get_value(cfg)) * c1 * random.random()
-          + (self.get_value(cfg2) - self.get_value(cfg)) * c2 * random.random())
-    # Map velocity to continuous space with sigmoid
-    ss = 1 / (1 + numpy.exp(-vs))
-    # Decide position randomly
-    ps = (ss - numpy.random.rand(1, self.size)) > 0
-    self.set_value(cfg, ps)
-    return vs
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration randomly
-
-    :param config: the configuration to be changed
-    """
-    value = numpy.random.rand(1, self.size) > 0.5
-    self._set(config, value)
-
-  def seed_value(self):
-    return numpy.random.rand(1, self.size) > 0.5
-
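-# A standalone sketch of the sigmoid position update above, using plain numpy
-# arrays in place of configurations (illustrative only):
-#
-#   import numpy
-#   vs = numpy.array([-2.0, 0.0, 2.0])     # new velocities
-#   ss = 1 / (1 + numpy.exp(-vs))          # sigmoid -> [0.12, 0.5, 0.88]
-#   ps = (ss - numpy.random.rand(3)) > 0   # element i is True w.p. ss[i]
-#
-# Large positive velocities drive an element toward 1, large negative ones
-# toward 0.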
-
-class FloatArray(Array):
-  """
-  Represents an array of float values
-  """
-  def __init__(self, name, size, fmax, fmin):
-    super(FloatArray, self).__init__(name, size)
-    self.fmax = fmax
-    self.fmin = fmin
-
-  def op1_randomize(self, config):
-    """
-    Set this parameter's value in a configuration randomly
-
-    :param config: the configuration to be changed
-    """
-    value = numpy.random.rand(1, self.size) * (
-        self.fmax - self.fmin) + self.fmin
-    self._set(config, value)
-
-  def seed_value(self):
-    value = numpy.random.rand(1, self.size) * (
-        self.fmax - self.fmin) + self.fmin
-    return value
-
-  def op3_swarm_parallel(self, cfg, cfg1, cfg2, c=1,
-                         c1=0.5, c2=0.5, velocities=0):
-    """
-    Simulates a single particle swarm optimization step for each element in the
-    array by updating each position and returning an array of new velocities.
-
-    The new velocity is given by
-
-    .. math:: c*velocity + r1*c1*(cfg1-cfg) + r2*c2*(cfg2-cfg)
-
-    where r1 and r2 are random values between 0 and 1. In each iteration, r1 and
-    r2 are constant across array elements
-
-    The new cfg positions are randomly chosen based on the new velocities
-
-    :param cfg: the configuration to be changed. This represents the current
-     position
-    :param cfg1: a configuration to shift towards. Should be the local best
-     position
-    :param cfg2: a configuration to shift towards. Should be the global best
-     position
-    :param c: the weight of the cfg velocities
-    :param c1: weight of cfg1
-    :param c2: weight of cfg2
-    :param velocities: the cfg velocities
-    :return: a numpy array of new velocities
-    """
-    vs = velocities * c + (self.get_value(cfg1) - self.get_value(
-        cfg)) * c1 * random.random() + (self.get_value(
-        cfg2) - self.get_value(cfg)) * c2 * random.random()
-    p = self.get_value(cfg) + vs
-    p[p > self.fmax] = self.fmax
-    p[p < self.fmin] = self.fmin
-    self.set_value(cfg, p)
-    return vs
-
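-# A standalone sketch of one clamped position update, using plain numpy arrays
-# in place of configurations (illustrative only):
-#
-#   import numpy
-#   fmin, fmax = 0.0, 1.0
-#   p = numpy.array([0.2, 0.9]) + numpy.array([-0.5, 0.3])   # -> [-0.3, 1.2]
-#   p[p > fmax] = fmax
-#   p[p < fmin] = fmin                                       # -> [0.0, 1.0]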
-
-##################
-
-class ManipulatorProxy(object):
-  """
-  wrapper around configuration manipulator and config pair
-  """
-
-  def __init__(self, manipulator, cfg):
-    self.cfg = cfg
-    self.manipulator = manipulator
-    self.params = manipulator.parameters_dict(self.cfg)
-
-  def keys(self):
-    return self.params.keys()
-
-  def __getitem__(self, k):
-    return ParameterProxy(self.params[k], self.cfg)
-
-
-class ParameterProxy(object):
-  """
-  wrapper around a parameter and config pair; adds the config
-  as the first argument to all method calls on the parameter
-  """
-
-  def __init__(self, param, cfg):
-    self.cfg = cfg
-    self.param = param
-
-  def __getattr__(self, key):
-    """equivalent of self.param.key(self.cfg, ...)"""
-    member = getattr(self.param, key)
-
-    def param_method_proxy(*args, **kwargs):
-      return member(self.cfg, *args, **kwargs)
-
-    if callable(member):
-      return param_method_proxy
-    else:
-      # we should only hit this for key == 'name'
-      return member
-
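-# A minimal sketch of how the proxies are meant to be used, assuming a
-# hypothetical manipulator with a parameter named 'x' (illustrative only):
-#
-#   proxy = ManipulatorProxy(manipulator, cfg)
-#   proxy['x'].op1_randomize()   # same as params['x'].op1_randomize(cfg)
-#   proxy['x'].name              # non-callable attributes pass through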
-
-# Inspection Methods
-def operators(param, num_parents):
-  """
-  Return a list of operators for the given parameter that take the specified
-  number of input configurations
-
-  :param param: a Parameter class
-  :param num_parents: a string specifying the number of inputs required by the
-    operator; should be one of '1', '2', '3', '4', or 'n'
-  """
-  ops = []
-  methods = inspect.getmembers(param, inspect.ismethod)
-  for m in methods:
-    name, obj = m
-    if is_operator(name, num_parents):
-      ops.append(name)
-  return ops
-
-def composable_operators(param, min_num_parents):
-  """
-  Return a list of operators for the given parameter that can be
-  programmatically composed with a composable technique generating
-  min_num_parents.
-
-  Programmatically composable operators have no non-cfg arguments.
-
-  :param param: a Parameter class
-  :param min_num_parents: the minimum number of parents passed to the operator
-  """
-  if min_num_parents < 1:
-    return []
-
-  allowed_num_parents = ['n']
-  for i in range(1,5):
-    if i > min_num_parents:
-      break
-    allowed_num_parents.append(str(i))
-
-  ops = []
-  methods = inspect.getmembers(param, inspect.ismethod)
-  for m in methods:
-    name, obj = m
-    argspec = inspect.getargspec(obj)
-    numargs = len(argspec.args) - (len(argspec.defaults) if argspec.defaults else 0)
-    for num_parents in allowed_num_parents:
-      if is_operator(name, num_parents):
-        if num_parents == 'n':
-          if numargs == 3: # self, cfg, cfgs
-            ops.append(name)
-        else:
-          if numargs == (1 + int(num_parents)):
-            ops.append(name)
-        break
-  return ops
-
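-# A sketch of the argument counting above, with a hypothetical operator
-# (illustrative only):
-#
-#   def op2_blend(self, cfg, cfg1, power=2.0): ...
-#
-# has 4 args and 1 default, so numargs == 3 == 1 + int('2'): it is composable
-# for min_num_parents >= 2, because the extra 'power' argument has a default.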
-
-def is_operator(name, num_parents):
-  """
-  Tells whether a method is an operator taking in the specified number of inputs
-  from the method name
-
-  :param name: the method name
-  :param num_parents: a string specifying the number of inputs required by the
-    operator; should be one of '1', '2', '3', '4', or 'n'
-  """
-  return ('op' + num_parents + '_') == name[:4]
-
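-# is_operator() keys off the method-name prefix alone; with hypothetical
-# method names (illustrative only):
-#
-#   is_operator('op1_randomize', '1')  # -> True
-#   is_operator('op3_cross', '3')      # -> True
-#   is_operator('op3_cross', '1')      # -> False ('op1_' != 'op3_')
-#   is_operator('opn_mix', 'n')        # -> True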
-def all_operators():
-  """
-  Return a dictionary mapping from parameter names to lists of operator function
-  names
-  """
-  ops = {}
-  for p in all_params():
-    name, obj = p
-    all_ops = []
-    for num in ['1', '2', '3', '4', 'n']:
-      all_ops += operators(obj, num)
-    ops[name] = all_ops
-  return ops
-
-def all_params():
-  params = inspect.getmembers(sys.modules[__name__], lambda x: inspect.isclass(
-    x) and x.__module__ == __name__ and issubclass(x, Parameter))
-  return params
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/metatechniques.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/metatechniques.py
deleted file mode 100644
index 2e33e7961ab2d7f9b16ea48cb680dd751af32d7a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/metatechniques.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import abc
-import logging
-from collections import deque, defaultdict
-from fn import _
-
-from .technique import SearchTechniqueBase
-
-log = logging.getLogger(__name__)
-
-class MetaSearchTechnique(SearchTechniqueBase):
-  """
-  a technique made up of a collection of other techniques
-  """
-  def __init__(self, techniques, log_freq = 500, *pargs, **kwargs):
-    super(MetaSearchTechnique, self).__init__(*pargs, **kwargs)
-    self.techniques = techniques
-    self.request_count = 0
-    self.log_freq = log_freq
-    self.logging_use_counters = defaultdict(int)
-    self.unique_names()
-
-  def unique_names(self):
-    names = set()
-    for t in self.techniques:
-      while t.name in names:
-        t.name += '~'
-      t.name = intern(t.name)
-      names.add(t.name)
-
-  def set_driver(self, driver):
-    super(MetaSearchTechnique, self).set_driver(driver)
-    for t in self.techniques:
-      t.set_driver(driver)
-    self.driver = driver
-
-  def desired_result(self):
-    techniques = self.select_technique_order()
-    for technique in techniques:
-      dr = technique.desired_result()
-      if dr is not None:
-        if dr is False:
-          # technique is waiting for results
-          continue
-        self.driver.register_result_callback(dr,
-            lambda result: self.on_technique_result(technique, result))
-        if self.log_freq:
-          self.logging_use_counters[technique.name] += 1
-          self.debug_log()
-        self.request_count += 1
-        return dr
-      else:
-        self.on_technique_no_desired_result(technique)
-    return None
-
-  def on_technique_no_desired_result(self, technique):
-    """called if a sub-technique returns None"""
-    pass
-
-  def on_technique_result(self, technique, result):
-    """callback for results of sub-techniques"""
-    pass
-
-  @abc.abstractmethod
-  def select_technique_order(self):
-    """select the order of next techniques to try"""
-    return []
-
-  def debug_log(self):
-    if self.log_freq and sum(self.logging_use_counters.values()) > self.log_freq:
-      log.info("%s: %s", self.name,
-               str(sorted(self.logging_use_counters.items(), key=_[1] * -1)))
-      self.logging_use_counters = defaultdict(int)
-
-class RoundRobinMetaSearchTechnique(MetaSearchTechnique):
-  """evenly switch between all source techniques"""
-  def __init__(self, techniques, **kwargs):
-    techniques = deque(techniques)
-    super(RoundRobinMetaSearchTechnique, self).__init__(techniques, **kwargs)
-
-  def select_technique_order(self):
-    rv = list(self.techniques)
-    self.techniques.rotate(1)
-    return rv
-
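-# A sketch of the rotation behaviour with three hypothetical techniques
-# (illustrative only):
-#
-#   from collections import deque
-#   techniques = deque(['A', 'B', 'C'])
-#   list(techniques)     # -> ['A', 'B', 'C']  (first call's order)
-#   techniques.rotate(1)
-#   list(techniques)     # -> ['C', 'A', 'B']  (next call starts elsewhere)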
-class RecyclingMetaTechnique(MetaSearchTechnique):
-  """
-  periodically restart techniques that are not performing well compared to
-  global best
-  """
-  def __init__(self,
-               techniques_generators,
-               window = 100,
-               factor = 5.0,
-               **kwargs):
-    if 'log_freq' not in kwargs:
-      kwargs['log_freq'] = None
-    techniques = deque((g(seed_cfg = None) for g in techniques_generators))
-    self.rename_i = 0
-    for t in techniques:
-      self.rename_technique(t)
-    super(RecyclingMetaTechnique, self).__init__(techniques, **kwargs)
-    self.best_results = defaultdict(lambda: None)
-    self.factor = factor
-    self.last_check = 0
-    self.old_best_results = defaultdict(lambda: None)
-    self.technique_generators = deque(techniques_generators)
-    self.window = window
-
-  def rename_technique(self, technique):
-    technique.name += ".R%d" % self.rename_i
-    self.rename_i += 1
-
-  def on_technique_result(self, technique, result):
-    """callback for results of sub-techniques"""
-    if (self.best_results[technique] is None or
-        self.driver.objective.lt(result, self.best_results[technique])):
-      self.best_results[technique] = result
-
-  def technique_cmp(self, a, b):
-    # a1 = self.old_best_results[a]
-    # a2 = self.best_results[a]
-    # b1 = self.old_best_results[b]
-    # b2 = self.best_results[b]
-    # if a1 is None and b1 is None:
-    #   return 0
-    # if a1 is None:
-    #   return -1
-    # if b1 is None:
-    #   return 1
-    # return self.driver.objective.project_compare(a1, a2, b1, b2, self.factor)
-
-    # not ready techniques go to the back
-    if not a.is_ready() or not b.is_ready():
-      return cmp(b.is_ready(), a.is_ready())
-
-    a = self.best_results[a]
-    b = self.best_results[b]
-    if a is None and b is None:
-      return 0
-    if a is None:
-      return -1
-    if b is None:
-      return 1
-    return self.driver.objective.compare(a, b)
-
-  def recycle_techniques(self):
-    techniques = list(self.techniques)
-    techniques.sort(cmp=self.technique_cmp)
-    worst = techniques[-1]
-
-    if (not worst.is_ready()
-        or (self.old_best_results[worst] is not None
-            and self.driver.objective.lt(self.driver.best_result,
-                                         self.best_results[worst]))):
-      techniques_new = deque()
-      tn = None
-      for t, gen in zip(self.techniques, self.technique_generators):
-        if t is worst:
-          tn = gen(seed_cfg=self.driver.best_result.configuration.data)
-          self.rename_technique(tn)
-          tn.set_driver(self.driver)
-          log.info("%s replacing %s with %s", self.name, t.name, tn.name)
-          techniques_new.append(tn)
-        else:
-          techniques_new.append(t)
-      self.techniques = techniques_new
-    else:
-      log.debug("%s: not replacing techniques", self.name)
-
-    self.old_best_results = self.best_results
-    self.best_results = defaultdict(lambda: None)
-    for t in self.techniques:
-      self.best_results[t] = self.old_best_results[t]
-
-  def select_technique_order(self):
-    """
-    round robin between techniques
-    """
-    if self.last_check + self.window < self.request_count:
-      self.last_check = self.request_count
-      self.recycle_techniques()
-    rv = list(self.techniques)
-    self.techniques.rotate(1)
-    self.technique_generators.rotate(1)
-    return rv
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/objective.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/objective.py
deleted file mode 100644
index b46a2f54b2f0922f774548c1c2d009ffa581512e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/objective.py
+++ /dev/null
@@ -1,338 +0,0 @@
-import abc
-import logging
-
-from fn import _
-
-import opentuner
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger(__name__)
-
-
-class SearchObjective(object):
-  """
-  delegates the comparison of results and configurations
-  """
-  __metaclass__ = abc.ABCMeta
-
-  @abc.abstractmethod
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return []
-
-  @abc.abstractmethod
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return
-
-  def config_compare(self, config1, config2):
-    """cmp() compatible comparison of resultsdb.models.Configuration"""
-    return self.result_compare(self.driver.results_query(config=config1).one(),
-                               self.driver.results_query(config=config2).one())
-
-  @abc.abstractmethod
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    return
-
-  def config_relative(self, config1, config2):
-    """return None, or a relative goodness of resultsdb.models.Configuration"""
-    return self.result_relative(self.driver.results_query(config=config1).one(),
-                                self.driver.results_query(config=config2).one())
-
-
-  def __init__(self):
-    self.driver = None
-
-  def set_driver(self, driver):
-    self.driver = driver
-
-  def result_order_by(self, q):
-    return q.order_by(*self.result_order_by_terms())
-
-  def compare(self, a, b):
-    """cmp() compatible compare"""
-    if isinstance(a, Configuration):
-      return self.config_compare(a, b)
-    if isinstance(a, Result):
-      return self.result_compare(a, b)
-    assert False
-
-  def relative(self, a, b):
-    if isinstance(a, Configuration):
-      return self.config_relative(a, b)
-    if isinstance(a, Result):
-      return self.result_relative(a, b)
-    assert False
-
-  def lt(self, a, b):
-    return self.compare(a, b) < 0
-
-  def lte(self, a, b):
-    return self.compare(a, b) <= 0
-
-  def gt(self, a, b):
-    return self.compare(a, b) > 0
-
-  def gte(self, a, b):
-    return self.compare(a, b) >= 0
-
-  def min(self, *l):
-    if len(l) == 1:
-      l = l[0]
-    rv = l[0]
-    for i in l[1:]:
-      if self.lt(i, rv):
-        rv = i
-    return rv
-
-  def max(self, *l):
-    if len(l) == 1:
-      l = l[0]
-    rv = l[0]
-    for i in l[1:]:
-      if self.gt(i, rv):
-        rv = i
-    return rv
-
-  def limit_from_config(self, config):
-    """
-    a time limit after which to kill a result, such that it can still be
-    compared to config
-    """
-    results = self.driver.results_query(config=config)
-    if results.count() == 0:
-      return None
-    else:
-      return max(map(_.time, self.driver.results_query(config=config)))
-
-
-  def project_compare(self, a1, a2, b1, b2, factor=1.0):
-    """
-    linearly project both a and b forward to see how they will compare in the
-    future
-    """
-    a3 = Result()
-    b3 = Result()
-    a3.time = _project(a1.time, a2.time, factor)
-    a3.accuracy = _project(a1.accuracy, a2.accuracy, factor)
-    a3.energy = _project(a1.energy, a2.energy, factor)
-    a3.confidence = _project(a1.confidence, a2.confidence, factor)
-    b3.time = _project(b1.time, b2.time, factor)
-    b3.accuracy = _project(b1.accuracy, b2.accuracy, factor)
-    b3.energy = _project(b1.energy, b2.energy, factor)
-    b3.confidence = _project(b1.confidence, b2.confidence, factor)
-    return self.result_compare(a3, b3)
-
-  def display(self, result):
-    """
-    produce a string version of a resultsdb.models.Result()
-    """
-    rv = []
-    for k in ('time', 'accuracy', 'energy', 'size', 'confidence'):
-      v = getattr(result, k)
-      if v is not None:
-        rv.append('%s=%.4f' % (k, float(v)))
-    return ', '.join(rv)
-
-  def filter_acceptable(self, query):
-    """Return a Result() query that only returns acceptable results"""
-    return query
-
-  def is_acceptable(self, result):
-    """Test if a Result() meets thresholds"""
-    return True
-
-  def stats_quality_score(self, result, worst_result, best_result):
-    """return a score for statistics"""
-    if not self.is_acceptable(result):
-      return worst_result.time
-    else:
-      return result.time
-
-
-def _project(a1, a2, factor):
-  if a1 is None or a2 is None:
-    return None
-  return a2 + factor * (a2 - a1)
-
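-# Worked examples of the linear extrapolation (values illustrative only):
-#
-#   _project(10.0, 8.0, 1.0)   # -> 6.0, continue the a1 -> a2 trend one step
-#   _project(10.0, 8.0, 0.5)   # -> 7.0, half a step past a2
-#   _project(None, 8.0, 1.0)   # -> None, missing data propagates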
-
-class MinimizeTime(SearchObjective):
-  """
-  minimize Result().time
-  """
-
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return [Result.time]
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return cmp(result1.time, result2.time)
-
-  def config_compare(self, config1, config2):
-    """cmp() compatible comparison of resultsdb.models.Configuration"""
-    return cmp(min(map(_.time, self.driver.results_query(config=config1))),
-               min(map(_.time, self.driver.results_query(config=config2))))
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    if result2.time == 0:
-      return float('inf') * result1.time
-    return result1.time / result2.time
-
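-# A sketch of result_relative() semantics, with hypothetical results r1, r2
-# (illustrative only):
-#
-#   r1.time, r2.time = 2.0, 4.0
-#   MinimizeTime().result_relative(r1, r2)   # -> 0.5, r1 is twice as fast
-#
-# Values below 1.0 mean result1 is better under this objective.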
-class MinimizeSize(SearchObjective):
-
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return [Result.size]
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return cmp(result1.size, result2.size)
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    if result2.size == 0:
-      return float('inf') * result1.size
-    return result1.size / result2.size
-
-
-class MinimizeSizeMinimizeTime(SearchObjective):
-  """
-  minimize Result().time, then Result().size
-  """
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return [Result.time, Result.size]
-
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return cmp((result1.time, result1.size), (result2.time, result2.size))
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    log.warning('result_relative() not yet implemented for %s',
-                self.__class__.__name__)
-
-class MaximizeAccuracy(SearchObjective):
-  """
-  maximize Result().accuracy
-  """
-
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return [-Result.accuracy]
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    # note opposite order
-    return cmp(result2.accuracy, result1.accuracy)
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    # note opposite order
-    if result1.accuracy == 0:
-      return float('inf') * result2.accuracy
-    return result2.accuracy / result1.accuracy
-
-  def stats_quality_score(self, result, worst_result, best_result):
-    """return a score for statistics"""
-    if not self.is_acceptable(result):
-      return worst_result.time
-    else:
-      return result.time
-
-  def stats_raw_score(self, result):
-    return result.accuracy
-
-
-class MaximizeAccuracyMinimizeSize(MaximizeAccuracy):
-  """
-  maximize Result().accuracy, break ties with Result().size
-  """
-
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-    return [-Result.accuracy, Result.size]
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return cmp((-result1.accuracy, result1.size),
-               (-result2.accuracy, result2.size))
-
-  def display(self, result):
-    """
-    produce a string version of a resultsdb.models.Result()
-    """
-    return "accuracy=%.8f, size=%.1f" % (result.accuracy, result.size)
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    # unimplemented for now
-    log.warning('result_relative() not yet implemented for %s',
-                self.__class__.__name__)
-    return None
-
-
-class ThresholdAccuracyMinimizeTime(SearchObjective):
-  """
-  if accuracy >= target:
-    minimize time
-  else:
-    maximize accuracy
-  """
-
-  def __init__(self, accuracy_target, low_accuracy_limit_multiplier=10.0):
-    self.accuracy_target = accuracy_target
-    self.low_accuracy_limit_multiplier = low_accuracy_limit_multiplier
-    super(ThresholdAccuracyMinimizeTime, self).__init__()
-
-  def result_order_by_terms(self):
-    """return database columns required to order by the objective"""
-
-    return ["min(accuracy, %f) desc" % self.accuracy_target,
-            opentuner.resultsdb.models.Result.time]
-
-  def result_compare(self, result1, result2):
-    """cmp() compatible comparison of resultsdb.models.Result"""
-    return cmp((-min(self.accuracy_target, result1.accuracy),
-                result1.time),
-               (-min(self.accuracy_target, result2.accuracy), result2.time))
-
-  def config_compare(self, config1, config2):
-    """cmp() compatible comparison of resultsdb.models.Configuration"""
-    return self.result_compare(
-      self.driver.results_query(config=config1, objective_ordered=True)[0],
-      self.driver.results_query(config=config2, objective_ordered=True)[0])
-
-  def limit_from_config(self, config):
-    """
-    a time limit after which to kill a result, such that it can still be
-    compared to config
-    """
-    results = self.driver.results_query(config=config)
-    if results.count() == 0:
-      return None
-    if self.accuracy_target > min(map(_.accuracy, results)):
-      m = self.low_accuracy_limit_multiplier
-    else:
-      m = 1.0
-    return m * max(map(_.time, results))
-
-
-  def filter_acceptable(self, query):
-    """Return a Result() query that only returns acceptable results"""
-    return query.filter(opentuner.resultsdb.models.Result.accuracy
-                        >= self.accuracy_target)
-
-  def is_acceptable(self, result):
-    """Test if a Result() meets thresholds"""
-    return result.accuracy >= self.accuracy_target
-
-  def result_relative(self, result1, result2):
-    """return None, or a relative goodness of resultsdb.models.Result"""
-    # unimplemented for now
-    log.warning('result_relative() not yet implemented for %s',
-                self.__class__.__name__)
-    return None
-
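-# A sketch of the ordering ThresholdAccuracyMinimizeTime induces, assuming a
-# hypothetical accuracy_target of 0.9 (values illustrative only). Accuracy is
-# capped at the target before comparison, so:
-#
-#   (accuracy=0.95, time=10) vs (accuracy=0.92, time=5)
-#       -> both cap to 0.9; the second wins on time
-#   (accuracy=0.85, time=5)  vs (accuracy=0.92, time=50)
-#       -> 0.85 is below target; the second wins despite being 10x slower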
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/patternsearch.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/patternsearch.py
deleted file mode 100644
index 7b526e7897f2c673552899ae3a115d6e2e06737b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/patternsearch.py
+++ /dev/null
@@ -1,72 +0,0 @@
-
-
-from opentuner.search import technique
-
-class PatternSearch(technique.SequentialSearchTechnique):
-  def main_generator(self):
-
-    objective   = self.objective
-    driver      = self.driver
-    manipulator = self.manipulator
-
-    # start at a random position
-    center = driver.get_configuration(manipulator.random())
-    self.yield_nonblocking(center)
-
-    # initial step size is arbitrary
-    step_size = 0.1
-
-    while True:
-      points = list()
-      for param in manipulator.parameters(center.data):
-        if param.is_primitive():
-          # get current value of param, scaled to be in range [0.0, 1.0]
-          unit_value = param.get_unit_value(center.data)
-
-          if unit_value > 0.0:
-            # produce new config with param set step_size lower
-            down_cfg = manipulator.copy(center.data)
-            param.set_unit_value(down_cfg, max(0.0, unit_value - step_size))
-            down_cfg = driver.get_configuration(down_cfg)
-            self.yield_nonblocking(down_cfg)
-            points.append(down_cfg)
-
-          if unit_value < 1.0:
-            # produce new config with param set step_size higher
-            up_cfg = manipulator.copy(center.data)
-            param.set_unit_value(up_cfg, min(1.0, unit_value + step_size))
-            up_cfg = driver.get_configuration(up_cfg)
-            self.yield_nonblocking(up_cfg)
-            points.append(up_cfg)
-
-        else: # ComplexParameter
-          for mutate_function in param.manipulators(center.data):
-            cfg = manipulator.copy(center.data)
-            mutate_function(cfg)
-            cfg = driver.get_configuration(cfg)
-            self.yield_nonblocking(cfg)
-            points.append(cfg)
-
-
-      yield None # wait for all results
-
-      #sort points by quality, best point will be points[0], worst is points[-1]
-      points.sort(cmp=objective.compare)
-
-      if (objective.lt(driver.best_result.configuration, center)
-          and driver.best_result.configuration != points[0]):
-        # another technique found a new global best, switch to that
-        center = driver.best_result.configuration
-      elif objective.lt(points[0], center):
-        # we found a better point, move there
-        center = points[0]
-      else:
-        # no better point, shrink the pattern
-        step_size /= 2.0
-
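-# A numeric sketch of one pattern-search move on a single primitive parameter
-# (values illustrative only):
-#
-#   unit_value, step_size = 0.35, 0.1
-#   down = max(0.0, unit_value - step_size)   # 0.25
-#   up = min(1.0, unit_value + step_size)     # 0.45
-#
-# The centre moves to whichever candidate measures best; only when the centre
-# is already best does step_size halve.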
-# register our new technique in global list
-technique.register(PatternSearch())
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/plugin.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/plugin.py
deleted file mode 100644
index ad8481837cbee62ba8c3f1c94a27529261953bb0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/plugin.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import abc
-import argparse
-import logging
-import time
-
-from datetime import datetime
-from fn import _
-
-log = logging.getLogger(__name__)
-display_log = logging.getLogger(__name__ + ".DisplayPlugin")
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--results-log',
-    help="file to store log of the best configuration times")
-argparser.add_argument('--results-log-details',
-    help="file to store log of the non-best configuration times")
-argparser.add_argument('--quiet', action='store_true',
-    help="print less information")
-argparser.add_argument('--display-frequency', default=10, type=int,
-    help="how often for DisplayPlugin to print")
-
-class SearchPlugin(object):
-  @property
-  def priority(self):
-    """control order the plugin hooks gets run in, lower runs first"""
-    return 0
-
-  def set_driver(self, driver):
-    """called before all other methods"""
-    self.driver = driver
-
-  def before_main(self): pass
-  def after_main(self):  pass
-
-  def before_techniques(self): pass
-  def after_techniques(self):  pass
-
-  def before_results_wait(self): pass
-  def after_results_wait(self):  pass
-
-  def on_result(self, result):
-    """
-    called once for every new result
-    """
-    pass
-
-  def on_result_for_technique(self, result, technique):
-    """
-    called right before a result is given to a technique
-    (result may be requested by multiple techniques)
-    """
-    pass
-
-  def on_new_best_result(self, result):
-    """
-    called whenever the global best result changes
-    """
-    pass
-
-class DisplayPlugin(SearchPlugin):
-  __metaclass__ = abc.ABCMeta
-  def __init__(self, display_period=5):
-    super(DisplayPlugin, self).__init__()
-    self.last  = time.time()
-    self.start = time.time()
-    self.display_period = display_period
-
-  def after_results_wait(self):
-    t = time.time()
-    if t - self.display_period > self.last:
-      # call display at most once per display_period seconds
-      self.last = t
-      self.display(t)
-
-  def after_main(self):
-    self.display()
-
-  @abc.abstractmethod
-  def display(self, t=None):
-    pass
-
-
-class LogDisplayPlugin(DisplayPlugin):
-  def display(self, t=None):
-    if not t:
-      t = time.time()
-    count = self.driver.results_query().count()
-    best = self.driver.results_query(objective_ordered = True).first()
-    if best is None:
-      log.warning("no results yet")
-      return
-    requestor = ','.join(map(_.requestor, best.desired_results))
-    display_log.info("tests=%d, best %s, cost %s, found by %s",
-                     count,
-                     cfg_repr(best.configuration),
-                     self.driver.objective.display(best),
-                     requestor,
-                     )
-
-class FileDisplayPlugin(SearchPlugin):
-  def __init__(self, out, details, *args, **kwargs):
-    super(FileDisplayPlugin, self).__init__(*args, **kwargs)
-    self.last_best = float('inf')
-    self.start_date = datetime.now()
-    if out:
-      self.out = open(out, "w")
-    else:
-      self.out = None
-    if out == details:
-      self.details = self.out
-      self.out = None
-    elif details:
-      self.details = open(details, "w")
-    else:
-      self.details = None
-
-  def on_result(self, result):
-    if self.out and result.time < self.last_best:
-      self.last_best = result.time
-      print >>self.out, \
-          (result.collection_date - self.start_date).total_seconds(), \
-          result.time
-      self.out.flush()
-    if self.details:
-      print >>self.details, \
-          (result.collection_date - self.start_date).total_seconds(), \
-          result.time
-      self.details.flush()
-
-def get_enabled(args):
-  plugins = []
-  if not args.quiet:
-    plugins.append(LogDisplayPlugin(args.display_frequency))
-  if args.results_log or args.results_log_details:
-    plugins.append(FileDisplayPlugin(args.results_log,
-                                     args.results_log_details))
-  return plugins
-
-def cfg_repr(cfg):
-  try:
-    s = repr(cfg.data)
-    if len(s) < 100:
-      return s
-  except Exception:
-    pass
-  return "#{0}".format(cfg.id)
-
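-# A minimal sketch of a custom plugin built on the hooks above (hypothetical,
-# illustrative only):
-#
-#   class BestLoggerPlugin(SearchPlugin):
-#     def on_new_best_result(self, result):
-#       log.info("new best: %s", cfg_repr(result.configuration))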
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/pso.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/pso.py
deleted file mode 100644
index 3b8c37a7787b900a70f80ffab00d5c90b46c7541..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/pso.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# -*- coding: utf-8 -*-
-# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab autoindent smarttab
-from manipulator import *
-from opentuner.search import technique
-import random
-import math
-
-class PSO(technique.SequentialSearchTechnique):
-  """ Particle Swarm Optimization """
-  def __init__(self, crossover, N = 30, init_pop=None, *pargs, **kwargs):
-    """
-    crossover: name of crossover operator function
-    """
-    super(PSO, self).__init__(*pargs, **kwargs)
-    self.crossover = crossover
-    self.name = 'pso-'+crossover.replace("op3_cross_","")
-    self.init_pop = init_pop
-    self.N = N
-
-  def main_generator(self):
-
-    objective = self.objective
-    driver = self.driver
-    m = self.manipulator
-
-    def config(cfg):
-      return driver.get_configuration(cfg)
-
-    population = self.init_pop
-    if not population:
-      population = [HybridParticle(m, self.crossover) for i in range(self.N)]
-
-    for p in population:
-      yield driver.get_configuration(p.position)
-
-    while True:
-      for particle in population:
-        g = driver.best_result.configuration.data
-        particle.move(g)
-        yield config(particle.position)
-        # update individual best
-        if objective.lt(config(particle.position), config(particle.best)):
-          particle.best = particle.position
-
-class HybridParticle(object):
-  def __init__(self, m, crossover_choice, omega=0.5, phi_l=0.5, phi_g=0.5):
-    """
-    m: a configuration manipulator
-    omega: influence of the particle's last velocity, a float in range [0,1];
-      omega=1 means the last velocity is fully retained
-    phi_l: influence of the particle's distance to its historical best
-      position, a float in range [0,1]
-    phi_g: influence of the particle's distance to the global best position,
-      a float in range [0,1]
-    """
-
-    self.manipulator = m
-    self.position = self.manipulator.random()   
-    self.best = self.position
-    self.omega = omega
-    self.phi_l = phi_l
-    self.phi_g = phi_g
-    self.crossover_choice = crossover_choice
-    self.velocity = {}
-    for p in self.manipulator.params:
-      # velocity as a continuous value
-      self.velocity[p.name] = 0
-
-  def move(self, global_best):
-    """
-    Update parameter values using corresponding operators. 
-    TODO: introduce operator choice map
-    """
-    m = self.manipulator
-    for p in m.params:
-      self.velocity[p.name] = p.op3_swarm(
-          self.position, global_best, self.best,
-          c=self.omega, c1=self.phi_g, c2=self.phi_l,
-          xchoice=self.crossover_choice,
-          velocity=self.velocity[p.name])
-
-
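-# A minimal sketch of one particle update, assuming a hypothetical manipulator
-# m and global-best configuration data g (illustrative only):
-#
-#   particle = HybridParticle(m, 'op3_cross_OX1')
-#   particle.move(g)   # shifts particle.position in place
-#
-# Each parameter keeps its own scalar velocity, so primitive parameters drift
-# continuously while complex ones fall back to the chosen crossover operator.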
-technique.register(PSO(crossover = 'op3_cross_OX3'))
-technique.register(PSO(crossover = 'op3_cross_OX1'))
-technique.register(PSO(crossover = 'op3_cross_PMX'))
-technique.register(PSO(crossover = 'op3_cross_PX'))
-technique.register(PSO(crossover = 'op3_cross_CX'))
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simplextechniques.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simplextechniques.py
deleted file mode 100644
index 3cfec0eebb25cf3c7ff2cc2bc69d558454660e32..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simplextechniques.py
+++ /dev/null
@@ -1,457 +0,0 @@
-import abc
-import logging
-import math
-from collections import defaultdict
-from fn import _
-from fn.iters import map, filter
-from .manipulator import Parameter
-from .metatechniques import RecyclingMetaTechnique
-from .technique import SequentialSearchTechnique, register
-
-log = logging.getLogger(__name__)
-
-
-class SimplexTechnique(SequentialSearchTechnique):
-  """
-  Base class with utility functions common
-  to simplex type methods
-  """
-
-  def __init__(self, seed_cfg=None, *args, **kwargs):
-    super(SimplexTechnique, self).__init__(*args, **kwargs)
-    self.centroid = None
-    self.last_simplex_points = None
-    self.seed_cfg = seed_cfg
-    self.simplex_points = []
-
-  def calculate_centroid(self):
-    """
-    average of all the PrimitiveParameters in self.simplex_points
-    ComplexParameters are copied from self.simplex_points[0]
-    """
-    sums = defaultdict(float)
-    counts = defaultdict(int)
-
-    for config in self.simplex_points:
-      cfg = config.data
-      for param in self.manipulator.parameters(cfg):
-        if param.is_primitive():
-          sums[param.name] += param.get_unit_value(cfg)
-          counts[param.name] += 1
-
-    centroid = self.manipulator.copy(self.simplex_points[0].data)
-    for param in self.manipulator.parameters(centroid):
-      if param.is_primitive():
-        param.set_unit_value(centroid,
-                             sums[param.name] / float(counts[param.name]))
-
-    return centroid
-
-  def cfg_to_str(self, cfg):
-    params = list(filter(Parameter.is_primitive,
-                         self.manipulator.parameters(cfg)))
-    params.sort(key=_.name)
-    return str(tuple(map(lambda x: x.get_unit_value(cfg), params)))
-
-  def debug_log(self):
-    for i, config in enumerate(self.simplex_points):
-      log.debug("simplex_points[%d] = %s", i, self.cfg_to_str(config.data))
-    if self.centroid:
-      log.debug("centroid = %s", self.cfg_to_str(self.centroid))
-
-  def linear_point(self, p1, p2, scale):
-    """
-    return a point on the line passing between p1 and p2 at position scale
-    such that p1 + scale*(p1 - p2)
-    """
-    return self.manipulator.linear_config(1.0, p1, scale, p1, -scale, p2)
-
-  def convergence_criteria(self):
-    """True will cause the simplex method to stop"""
-    if self.rounds_since_novel_request > 3 * len(self.simplex_points) + 1:
-      return True
-    if self.last_simplex_points == self.simplex_points:
-      return True
-    self.last_simplex_points = list(self.simplex_points)
-    return False
-
-  def initial_simplex_seed(self):
-    """
-    return a point to base the initial simplex on
-    """
-    if self.seed_cfg is not None:
-      return self.seed_cfg
-    return self.manipulator.random()
-
-  @abc.abstractmethod
-  def initial_simplex(self):
-    """
-    return an initial list of configurations
-    """
-    return []
-
-
-class RandomInitialMixin(object):
-  """
-  start with random initial simplex
-  """
-
-  def initial_simplex(self):
-    # we implicitly assume number of parameters is fixed here, however
-    # it will work if it isn't (simplex size is undefined)
-    cfg0 = self.initial_simplex_seed()
-    params = self.manipulator.parameters(cfg0)
-    return [cfg0] + [self.manipulator.random()
-                     for p in params
-                     if p.is_primitive()]
-
-
-class RightInitialMixin(object):
-  """
-  start with random initial right triangle like simplex
-  """
-
-  def __init__(self, initial_unit_edge_length=0.1, *args, **kwargs):
-    assert initial_unit_edge_length <= 0.5
-    self.initial_unit_edge_length = initial_unit_edge_length
-    super(RightInitialMixin, self).__init__(*args, **kwargs)
-
-  def initial_simplex(self):
-    cfg0 = self.initial_simplex_seed()
-    simplex = [cfg0]
-    params = self.manipulator.parameters(cfg0)
-    params = filter(lambda x: x.is_primitive(), params)
-    for p in params:
-      simplex.append(self.manipulator.copy(cfg0))
-      v = p.get_unit_value(simplex[-1])
-      if v <= 0.5:
-        v += self.initial_unit_edge_length
-      else:
-        v -= self.initial_unit_edge_length
-      p.set_unit_value(simplex[-1], v)
-    return simplex
-
-
-class RegularInitialMixin(object):
-  """
-  start with random initial regular simplex (all edges equal length)
-  """
-
-  def __init__(self, initial_unit_edge_length=0.1, *args, **kwargs):
-    assert initial_unit_edge_length <= 0.5
-    self.initial_unit_edge_length = initial_unit_edge_length
-    super(RegularInitialMixin, self).__init__(*args, **kwargs)
-
-  def initial_simplex(self):
-    cfg0 = self.initial_simplex_seed()
-    simplex = [cfg0]
-    params = self.manipulator.parameters(cfg0)
-    params = list(filter(lambda x: x.is_primitive(), params))
-    if len(params) == 0:
-      return simplex
-
-    q = (((math.sqrt(len(params) + 1.0) - 1.0) / (len(params) * math.sqrt(2.0)))
-         * self.initial_unit_edge_length)
-    p = q + ((1.0 / math.sqrt(2.0)) * self.initial_unit_edge_length)
-
-    base = [x.get_unit_value(cfg0) for x in params]
-    for j in xrange(len(base)):
-      if max(p, q) + base[j] > 1.0:
-        #flip this dimension as we would overflow our [0,1] bounds
-        base[j] *= -1.0
-
-    for i in xrange(len(params)):
-      simplex.append(self.manipulator.copy(cfg0))
-      params[i].set_unit_value(simplex[-1], abs(base[i] + p))
-      for j in xrange(i + 1, len(params)):
-        params[j].set_unit_value(simplex[-1], abs(base[i] + q))
-
-    return simplex
-
-
-class NelderMead(SimplexTechnique):
-  """
-  Nelder-Mead downhill simplex method.
-
-  Based on description of method on page 82 of
-  'Noisy Optimization With Evolution Strategies' by Dirk V. Arnold.
-
-  We set alpha=2.0 by default instead of the often recommended alpha=1.0 to
-  avoid a common degenerate case, where the volume of the simplex becomes zero.
-  This is easiest to see with a single parameter. Let the simplex points
-  be x0,x1.  Let the centroid be c=(x0+x1)/2.0 and the reflection point be:
-  reflection = c + alpha*(c-x1) = (x0+x1)*(1+alpha)/2 - x1
-  The problem is, if we set alpha = 1.0, then the x1's cancel out and the
-  reflection point becomes just reflection=x0, which also happens to be the
-  second best point, meaning we will use it.  So in a single step of the
-  algorithm the simplex becomes singular.
-  """
-
-  def __init__(self,
-               alpha=2.0,
-               gamma=2.0,
-               beta=0.5,
-               sigma=0.5,
-               *args, **kwargs):
-    self.alpha = alpha
-    self.gamma = gamma
-    self.beta = beta
-    self.sigma = sigma
-    super(NelderMead, self).__init__(*args, **kwargs)
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['alpha', 'gamma', 'beta', 'sigma']
-
-
-  def main_generator(self):
-    objective = self.objective
-    driver = self.driver
-
-    # test the entire initial simplex
-    self.simplex_points = list(map(driver.get_configuration,
-                                   self.initial_simplex()))
-
-    if len(self.simplex_points) <= 1:
-      log.warning("only 1 point in simplex, will not use %s", self.name)
-      return
-
-    log.debug("initial points")
-    for p in self.simplex_points:
-      self.yield_nonblocking(p)
-    yield None  # wait until results are ready
-
-    while not self.convergence_criteria():
-      # next steps assume this ordering
-      self.simplex_points.sort(cmp=objective.compare)
-      # set limit from worst point
-      self.limit = objective.limit_from_config(self.simplex_points[-1])
-      self.centroid = self.calculate_centroid()
-      if log.isEnabledFor(logging.DEBUG):
-        self.debug_log()
-
-      reflection = self.reflection_point()
-      yield reflection
-
-      if objective.lt(reflection, self.simplex_points[0]):
-        #expansion case
-        expansion = self.expansion_point(reflection)
-        yield expansion
-
-        if objective.lt(expansion, reflection):
-          log.debug("using expansion point")
-          self.simplex_points[-1] = expansion
-        else:
-          log.debug("using reflection point (considered expansion)")
-          self.simplex_points[-1] = reflection
-
-      elif objective.lt(reflection, self.simplex_points[1]):
-        #reflection case
-        log.debug("using reflection point")
-        self.simplex_points[-1] = reflection
-      else:
-        # contraction case
-        if objective.lte(reflection, self.simplex_points[-1]):
-          # outside contraction
-          contract_base = reflection
-        else:
-          # inside contraction
-          contract_base = self.simplex_points[-1]
-
-        contraction = self.contraction_point(contract_base)
-        yield contraction
-
-        if objective.lte(contraction, contract_base):
-          log.debug("using contraction point")
-          self.simplex_points[-1] = contraction
-        else:
-          #reduction case
-          log.debug("performing shrink reduction")
-          self.perform_shrink_reduction()
-          for p in self.simplex_points:
-            self.yield_nonblocking(p)
-          yield None  # wait until results are ready
-
-  def reflection_point(self):
-    """
-    reflect worst point across centroid
-    """
-    return self.driver.get_configuration(
-        self.linear_point(self.centroid,
-                          self.simplex_points[-1].data,
-                          self.alpha))
-
-  def expansion_point(self, reflection):
-    """
-    reflect worst point across centroid more (by default 2x as much)
-    """
-    return self.driver.get_configuration(
-        self.linear_point(self.centroid,
-                          reflection.data,
-                          -self.gamma))
-
-  def contraction_point(self, contract_base):
-    """
-    reflect worst point across centroid less
-    """
-    return self.driver.get_configuration(
-        self.linear_point(self.centroid,
-                          contract_base.data,
-                          -self.beta))
-
-  def perform_shrink_reduction(self):
-    """
-    shrink the simplex in size by sigma=1/2 (default), moving it closer to the
-    best point
-    """
-    for i in xrange(1, len(self.simplex_points)):
-      self.simplex_points[i] = self.driver.get_configuration(
-          self.linear_point(self.simplex_points[0].data,
-                            self.simplex_points[i].data,
-                            -self.sigma))
-
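-# A one-dimensional numeric sketch of the degenerate case described in the
-# NelderMead docstring: with simplex points x0=0.2, x1=0.6 the centroid is
-# c = (0.2 + 0.6) / 2 = 0.4, and
-#
-#   alpha=1.0: reflection = c + 1.0*(c - x1) = 0.2  (== x0, simplex collapses)
-#   alpha=2.0: reflection = c + 2.0*(c - x1) = 0.0  (a genuinely new point)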
-
-class Torczon(SimplexTechnique):
-  """
-  Torczon multi-directional search algorithm.
-
-  Based on description of method on page 85 of
-  'Noisy Optimization With Evolution Strategies' by Dirk V. Arnold.
-  """
-
-  def __init__(self,
-               alpha=1.0,
-               gamma=2.0,
-               beta=0.5,
-               *args, **kwargs):
-    self.alpha = alpha
-    self.gamma = gamma
-    self.beta = beta
-    super(Torczon, self).__init__(*args, **kwargs)
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    return ['alpha', 'gamma', 'beta']
-
-
-  def main_generator(self):
-    objective = self.objective
-    driver = self.driver
-
-    # test the entire initial simplex
-    self.simplex_points = list(map(driver.get_configuration,
-                                   self.initial_simplex()))
-    if len(self.simplex_points) <= 1:
-      log.warning("only 1 point in simplex, will not use %s", self.name)
-      return
-
-    log.debug("initial points")
-    for p in self.simplex_points:
-      self.yield_nonblocking(p)
-    yield None  # wait until results are ready
-    self.simplex_points.sort(cmp=objective.compare)
-
-    while not self.convergence_criteria():
-      # set limit from worst point
-      self.limit = objective.limit_from_config(self.simplex_points[-1])
-
-      if log.isEnabledFor(logging.DEBUG):
-        self.debug_log()
-
-      reflected = self.reflected_simplex()
-      yield None  # wait until results are ready
-      reflected.sort(cmp=objective.compare)
-
-      # this next condition implies reflected[0] < simplex_points[0] since
-      # reflected is sorted and contains simplex_points[0] (saves a db query)
-      if reflected[0] is not self.simplex_points[0]:
-        expanded = self.expanded_simplex()
-        yield None  # wait until results are ready
-        expanded.sort(cmp=objective.compare)
-
-        if objective.lt(expanded[0], reflected[0]):
-          log.debug("expansion performed")
-          self.simplex_points = expanded
-        else:
-          log.debug("reflection performed")
-          self.simplex_points = reflected
-      else:
-        contracted = self.contracted_simplex()
-        yield None  # wait until results are ready
-        contracted.sort(cmp=objective.compare)
-
-        log.debug("contraction performed")
-        self.simplex_points = contracted
-
-  def scaled_simplex(self, scale):
-    """
-    assumes self.simplex_points[0] is best point and returns a new simplex
-    reflected across self.simplex_points[0] by scale
-    """
-    simplex = list(self.simplex_points)  # shallow copy
-    for i in xrange(1, len(simplex)):
-      simplex[i] = self.driver.get_configuration(
-          self.linear_point(simplex[0].data, simplex[i].data, scale))
-      self.yield_nonblocking(simplex[i])
-    return simplex
-
-  def reflected_simplex(self):
-    return self.scaled_simplex(self.alpha)
-
-  def expanded_simplex(self):
-    return self.scaled_simplex(self.gamma)
-
-  def contracted_simplex(self):
-    return self.scaled_simplex(-self.beta)
-
-
-class RandomNelderMead(RandomInitialMixin, NelderMead):
-  pass
-
-
-class RightNelderMead(RightInitialMixin, NelderMead):
-  pass
-
-
-class RegularNelderMead(RegularInitialMixin, NelderMead):
-  pass
-
-
-class RandomTorczon(RandomInitialMixin, Torczon):
-  pass
-
-
-class RightTorczon(RightInitialMixin, Torczon):
-  pass
-
-
-class RegularTorczon(RegularInitialMixin, Torczon):
-  pass
-
-
-class MultiNelderMead(RecyclingMetaTechnique):
-  def __init__(self):
-    super(MultiNelderMead, self).__init__([RightNelderMead, RandomNelderMead,
-                                           RegularNelderMead])
-
-
-class MultiTorczon(RecyclingMetaTechnique):
-  def __init__(self):
-    super(MultiTorczon, self).__init__([RightTorczon, RandomTorczon,
-                                        RegularTorczon])
-
-
-register(RandomNelderMead())
-register(RegularNelderMead())
-register(RightNelderMead())
-register(MultiNelderMead())
-register(RandomTorczon())
-register(RegularTorczon())
-register(RightTorczon())
-register(MultiTorczon())
-
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simulatedannealing.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simulatedannealing.py
deleted file mode 100644
index 45b315f2e6bbceda2822ae72623e8c0032afe66b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/simulatedannealing.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from opentuner.search import technique
-import math
-import random
-#Default interval steps for cooling schedules
-DEFAULT_INTERVAL = 100
-
-#Pseudo-annealing - no relative energy input into acceptance function
-class PseudoAnnealingSearch(technique.SequentialSearchTechnique):
-  def __init__(self,
-               temps = [30, 0],  #temperature schedule
-               intervals = [],   #duration schedule
-               loop = True,      #do we loop the schedule if we reach the end?
-               *pargs, **kwargs):
-    #fill intervals sufficiently
-    ext_intervals = list(intervals)
-    for i in range(len(temps)-len(intervals)-1):
-      ext_intervals.append(DEFAULT_INTERVAL)
-            
-    #create temperature schedule (list of temps)
-    cool_schedule = [temps[0]]
-    for i in range(len(temps)-1):
-      step = (float(temps[i+1]) - temps[i])/ext_intervals[i]
-      for j in range(ext_intervals[i]):
-        cool_schedule.append(max(cool_schedule[-1] + step,0))
-      
-    self.cool_schedule = cool_schedule
-    self.loop = loop
-    self.scaling = 50 #scaling of acceptance function
-      
-    super(PseudoAnnealingSearch,self).__init__(*pargs,**kwargs)
-
-
-  def main_generator(self):
-    objective = self.objective
-    driver = self.driver
-    manipulator = self.manipulator
-
-    #Start in a random spot
-    state = driver.get_configuration(manipulator.random())
-    yield state
-    #schedule counter
-    counter = 0
-    max_time = len(self.cool_schedule)-1
-    #Check whether a relative objective is implemented
-    has_rel = objective.relative(state, state) is not None
-    #overridden: the relative-comparison path below is disabled for now
-    has_rel = False
-              
-    while True:
-      #Determine temperature
-      temp = self.cool_schedule[min(counter,max_time)]
-      #scale stepsize with temp and time (arbitrary)
-      step_size = math.exp(-(20 + counter / 100) / (temp + 1.0))
-          
-      #get candidate neighbors using manipulator
-      points = list()
-      points.append(state)
-      for param in manipulator.parameters(state.data):
-        if param.is_primitive():
-          # get current value of param, scaled to be in range [0.0, 1.0]
-          unit_value = param.get_unit_value(state.data)
-          if unit_value > 0.0:
-            # produce new config with param set step_size lower
-            down_cfg = manipulator.copy(state.data)
-            param.set_unit_value(down_cfg, max(0.0, unit_value - step_size*random.random()))
-            down_cfg = driver.get_configuration(down_cfg)
-            self.yield_nonblocking(down_cfg)
-            points.append(down_cfg)
-
-          if unit_value < 1.0:
-            # produce new config with param set step_size higher
-            up_cfg = manipulator.copy(state.data)
-            param.set_unit_value(up_cfg, min(1.0, unit_value + step_size*random.random()))
-            up_cfg = driver.get_configuration(up_cfg)
-            self.yield_nonblocking(up_cfg)
-            points.append(up_cfg)
-        else: # ComplexParameter
-          for mutate_function in param.manipulators(state.data):
-            cfg = manipulator.copy(state.data)
-            mutate_function(cfg)
-            cfg = driver.get_configuration(cfg)
-            self.yield_nonblocking(cfg)
-            points.append(cfg)
-      yield None # wait for all results
-            
-      #Relative comparison implemented
-      if has_rel:
-        while True:
-          if len(points) == 0:
-            state = driver.best_result.configuration
-            break
-          candidate = points.pop(random.randint(0, len(points) - 1))
-          #compare to global best
-          if random.random() < AcceptanceFunction(
-              1, objective.relative(candidate, driver.best_result.configuration),
-              temp, self.scaling):
-            state = candidate
-            break
-      #No relative compare
-      else:
-      #sort points by "energy" (quality)
-        points.sort(cmp=objective.compare)
-            
-        #Make decision about changing state
-        #probability picking next-best state is exp^(-1/temp)
-        #repeat and cycle to get state p-dist resembling this
-        sel = 0
-        while AcceptanceFunction(0,1,temp,1)>random.random():
-          sel += 1
-        state = points[sel%len(points)]
-            
-        #switch to the global best if temperature is low (i.e. we aren't moving much)
-        if AcceptanceFunction(0,1,temp,1) < 0.0001 and objective.lt(driver.best_result.configuration, state):
-          state = driver.best_result.configuration
-          
-      #update counter
-      counter +=1
-      if counter>max_time and self.loop:
-        counter=counter-max_time
-              
-
-#Acceptance probability function for annealing
-def AcceptanceFunction(e,e_new,temp,scaling):
-  #Standard acceptance probability function using relative "goodness"
-  if e>=e_new:
-    return 1
-  if temp == 0:
-    return 0
-  if scaling*(e_new-e)/temp > 10:
-    #for practical purposes, probability is too low.
-    return 0
-  return math.exp(scaling*(e-e_new)/temp)
-
-
-#register technique
-technique.register(PseudoAnnealingSearch())
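
As a standalone illustration of the acceptance rule above, the sketch below (made-up energies and temperatures, not values taken from the tuner) shows how the probability of accepting a worsening move decays as the schedule cools:

import math

def acceptance(e, e_new, temp, scaling):
    # same rule as AcceptanceFunction above: improvements are always
    # accepted; worsening moves pass with probability
    # exp(scaling*(e-e_new)/temp), cut off when that would underflow
    if e >= e_new:
        return 1.0
    if temp == 0 or scaling * (e_new - e) / temp > 10:
        return 0.0
    return math.exp(scaling * (e - e_new) / temp)

# a worsening move (energy 1.0 -> 1.2) is likely while hot and
# effectively impossible once the schedule has cooled
for temp in (50.0, 10.0, 1.0, 0.1):
    print(temp, acceptance(1.0, 1.2, temp, 50))  # ~0.82, ~0.37, ~5e-5, 0.0
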
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/technique.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/technique.py
deleted file mode 100644
index 849391df9bb37454301c90a520fbbe6b5025c683..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/search/technique.py
+++ /dev/null
@@ -1,358 +0,0 @@
-import abc
-import argparse
-import logging
-import os
-import random
-import re
-import sys
-
-from importlib import import_module
-from datetime import datetime
-from fn import _
-
-from opentuner.resultsdb.models import *
-from plugin import SearchPlugin
-
-log = logging.getLogger(__name__)
-#log.setLevel(logging.DEBUG)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--technique','-t', action='append',
-                       help="which technique to use")
-argparser.add_argument('--list-techniques','-lt', action='store_true',
-                       help="list techniques available and exit")
-argparser.add_argument('--generate-bandit-technique','-gbt', action='store_true',
-                       help="randomly generate a bandit to use")
-
-class SearchTechniqueBase(object):
-  """
-  abstract base class for search techniques, with minimal interface
-  """
-  __metaclass__ = abc.ABCMeta
-
-  def __init__(self, name = None):
-    super(SearchTechniqueBase, self).__init__()
-    if name:
-      self.name = name
-    else:
-      self.name = self.default_name()
-
-  def is_ready(self):
-    """test if enough data has been gathered to use this technique"""
-    return True
-
-  def default_name(self):
-    """name of this SearchTechnique uses for display/accounting"""
-    return self.__class__.__name__
-
-  def handle_requested_result(self, result):
-    """called for each new Result(), requested by this technique"""
-    pass
-
-  @abc.abstractmethod
-  def set_driver(self, driver):
-    """called at start of tuning process"""
-    return
-
-  @abc.abstractmethod
-  def desired_result(self):
-    """
-    return a resultsdb.models.DesiredResult object (or None) based on past
-    performance
-    """
-    return
-
-class SearchTechnique(SearchPlugin, SearchTechniqueBase):
-  """
-  a search technique with basic utility functions
-  """
-
-  def __init__(self, *pargs, **kwargs):
-    super(SearchTechnique, self).__init__(*pargs, **kwargs)
-    self.driver = None
-    self.manipulator = None
-    self.objective = None
-    self.request_count = 0
-
-  def set_driver(self, driver):
-    super(SearchTechnique, self).set_driver(driver)
-    self.manipulator = driver.manipulator
-    self.objective = driver.objective
-    driver.add_plugin(self)
-
-  def desired_result(self):
-    """
-    create and return a resultsdb.models.DesiredResult
-    returns None if no desired results and False if waiting for results
-    """
-    cfg = self.desired_configuration()
-    if cfg is None:
-      return None
-    if cfg is False:
-      return False
-    if type(cfg) is Configuration:
-      config = cfg
-    else:
-      config = self.driver.get_configuration(cfg)
-    desired = DesiredResult(configuration=config,
-                            requestor=self.name,
-                            generation=self.driver.generation,
-                            request_date=datetime.now(),
-                            tuning_run=self.driver.tuning_run)
-    if hasattr(self, 'limit'):
-      desired.limit = self.limit
-    self.driver.register_result_callback(desired, self.handle_requested_result)
-    self.request_count += 1
-    return desired
-
-  @abc.abstractmethod
-  def desired_configuration(self):
-    """
-    return a cfg that we should test
-    given a ConfigurationManipulator and SearchDriver
-    return None if there are no configurations to test
-    return False if waiting for results
-    """
-    return dict()
-
-  def handle_requested_result(self, result):
-    """called for each new Result(), regardless of who requested it"""
-    pass
-
-  def default_generated_name(self):
-    """ The default generated name for this technique """
-    return self.base_name()
-
-  def use_default_generated_name(self):
-    """ set the name of this technique to the default generated name """
-    self.name = self.default_generated_name()
-
-  def base_name(self):
-    """
-    Return the base name of this technique with form
-    classname;hyperparam1,v1;hyperparam2,v2 ...
-    where hyperparams are taken in order from get_hyper_parameters()
-
-    Should only be called after this technique has finished initializing.
-    """
-    out = [self.__class__.__name__]
-    for hyper_parameter in self.get_hyper_parameters():
-      # get hyperparam,v as a string and append
-      try:
-        out.append(hyper_parameter + ',' + str(getattr(self, hyper_parameter)))
-      except AttributeError:
-        log.error("Uninitialized hyper-parameter %s for technique %s.",
-                   hyper_parameter, self.__class__.__name__)
-
-    return ';'.join(out)
-
-  @classmethod
-  def get_hyper_parameters(cls):
-    """
-    return a list of hyper-parameter names for this technique
-
-    Each name string must match the attribute holding the hyper-parameter
-    value on technique instances, as well as the keyword argument used when
-    initializing an instance. Hyper-parameters should only take literal
-    values.
-
-    For example, given the hyper-parameter "mutation_rate", the __init__
-    method should accept 'mutation_rate' as a keyword argument and later
-    have the line self.mutation_rate = mutation_rate
-    """
-    return []
-
-  @classmethod
-  def generate_technique(cls, manipulator=None, *args, **kwargs):
-    """ return a new technique based off this instance """
-    t = cls(*args, **kwargs)
-    t.use_default_generated_name()
-    return t
-
-class PureRandom(SearchTechnique):
-  """
-  request configurations completely randomly
-  """
-  def desired_configuration(self):
-    return self.manipulator.random()
-
-class AsyncProceduralSearchTechnique(SearchTechnique):
-  def __init__(self, *pargs, **kwargs):
-    super(AsyncProceduralSearchTechnique, self).__init__(*pargs, **kwargs)
-    self.gen = None
-    self.done = False
-    self.latest_results = []
-
-  def call_main_generator(self):
-    """passthrough (used in subclasses)"""
-    return self.main_generator()
-
-  def desired_configuration(self):
-    if self.gen is None:
-      log.debug("%s: creating generator", self.name)
-      self.gen = self.call_main_generator()
-    if not self.done:
-      try:
-        return self.gen.next()
-      except StopIteration:
-        log.debug("%s: generator finished", self.name)
-        self.done = True
-    return None
-
-  @abc.abstractmethod
-  def main_generator(self):
-    """
-    custom generator that conducts this search; it should `yield config`
-    to request tests, and call driver.get_results() to read the results
-
-    in AsyncProceduralSearchTechnique results are ready at an undefined
-    time (`yield False` to stall and wait for them)
-
-    in SequentialSearchTechnique results are ready after the yield
-    """
-    pass
-
-  def is_ready(self):
-    return not self.done
-
-class SequentialSearchTechnique(AsyncProceduralSearchTechnique):
-  def __init__(self, novelty_threshold=50, reset_threshold=500, *pargs, **kwargs):
-    super(SequentialSearchTechnique, self).__init__(*pargs, **kwargs)
-    self.pending_tests = []
-    self.novelty_threshold = novelty_threshold
-    self.rounds_since_novel_request = 0
-    self.reset_threshold = reset_threshold
-
-  def yield_nonblocking(self, cfg):
-    """
-    within self.main_generator() act like `yield cfg`, but don't wait for the
-    results until the following yield (spawn/sync style)
-    """
-    if cfg:
-      self.pending_tests.append(cfg)
-
-  def call_main_generator(self):
-    """insert waits for results after every yielded item"""
-    subgen = self.main_generator()
-    self.rounds_since_novel_request = 0
-    while True:
-      self.rounds_since_novel_request += 1
-      if (self.rounds_since_novel_request % self.novelty_threshold) == 0:
-        log.warning("%s has not requested a new result for %d rounds",
-                    self.name, self.rounds_since_novel_request)
-        if (self.rounds_since_novel_request > self.reset_threshold):
-          log.warning("%s is being reset", self.name)
-          subgen = self.main_generator()
-          self.rounds_since_novel_request = 0
-        yield None # give other techniques a shot
-      try:
-        p = subgen.next()
-        if p:
-          self.pending_tests.append(p)
-      except StopIteration:
-        return
-      finally:
-        for p in self.pending_tests:
-          if not self.driver.has_results(p):
-            self.rounds_since_novel_request = 0
-            yield p
-
-      # wait for all pending_tests to have results
-      c = 0
-      while self.pending_tests:
-        log.debug("%s: waiting for %d pending tests",
-                  self.name, len(self.pending_tests))
-        c += 1
-        if (c % 100) == 0:
-          log.error("%s: still waiting for %d pending tests (c=%d)",
-                     self.name, len(self.pending_tests), c)
-
-        self.pending_tests = filter(lambda x: not self.driver.has_results(x),
-                                    self.pending_tests)
-        if self.pending_tests:
-          self.rounds_since_novel_request = 0
-          yield False # wait
-
-#list of all techniques
-the_registry = list()
-
-#list of technique generators
-the_generator_registry = list()
-
-def register(t):
-  the_registry.append(t)
-
-def register_generator(cls, generator_weight=1.0, *args, **kwargs):
-  """
-  register a technique generator - a tuple of (technique class, args, kwargs)
-  where args and kwargs will be passed into the generate_technique classmethod -
-  with specified probability weight when randomly choosing a generator
-
-  :param cls: a technique class to use as a generator
-  :param generator_weight: probability weighting when randomly choosing a generator
-  :param args: arguments to pass into generate_technique class method
-  :param kwargs: arguments to pass into generate_technique class method
-  """
-  the_generator_registry.append(((cls, args, kwargs), generator_weight))
-
-register(PureRandom())
-
-def get_random_generator_technique(generators=None, manipulator=None):
-  """
-  Takes in a sequence of ((generator, args, kwargs), weight) tuples and
-  returns a randomly generated technique.
-
-  :param generators: optional argument to avoid repeated getting of generators
-  :param manipulator: manipulator to pass to generate_technique class method.
-  """
-  if generators is None:
-    techniques, generators = all_techniques()
-  g, args, kwargs = weighted_choice(generators)
-  return g.generate_technique(manipulator, *args, **kwargs)
-
-
-def weighted_choice(choices):
-  """ takes in a sequence of (choice, weight) tuples and randomly returns one """
-  total = sum(w for c, w in choices)
-  r = random.uniform(0, total)
-  upto = 0
-  for c, w in choices:
-    upto += w
-    if upto > r:
-      return c
-  return random.choice([c for c, w in choices])
-
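
A quick sanity check of weighted_choice (illustrative weights; assumes this module is importable as opentuner.search.technique):

from collections import Counter
from opentuner.search.technique import weighted_choice

# with weights 1:3, 'b' should come back roughly 75% of the time
print(Counter(weighted_choice([('a', 1.0), ('b', 3.0)]) for _ in range(10000)))
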
-
-def all_techniques():
-  #import all modules in search to ensure techniques are registered
-  for f in sorted(os.listdir(os.path.dirname(__file__))):
-    m = re.match(r'^(.*)[.]py$', f)
-    if m:
-      import_module('opentuner.search.'+m.group(1))
-
-  return the_registry, the_generator_registry
-
-def get_enabled(args):
-  techniques, generators = all_techniques()
-  if args.list_techniques:
-    for t in techniques:
-      print t.name
-    sys.exit(0)
-
-  if not args.technique:
-    # no techniques specified, default technique
-    args.technique = ['AUCBanditMetaTechniqueA']
-
-  for unknown in set(args.technique) - set(map(_.name, techniques)):
-    log.error('unknown technique %s', unknown)
-    raise Exception('Unknown technique: --technique={}'.format(unknown))
-
-  return [t for t in techniques if t.name in args.technique]
-
-def get_root(args):
-  from metatechniques import RoundRobinMetaSearchTechnique
-  enabled = get_enabled(args)
-  if len(enabled) == 1:
-    return enabled[0]
-  return RoundRobinMetaSearchTechnique(get_enabled(args))
-
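
To make the interface above concrete, here is a sketch of a custom technique (hypothetical class name) using the documented yield / yield_nonblocking protocol; once registered, --technique=RandomBatch can select it:

from opentuner.search import technique

class RandomBatch(technique.SequentialSearchTechnique):
  """sketch: test a small batch of random configs per round, track the best"""

  def main_generator(self):
    driver, manipulator, objective = self.driver, self.manipulator, self.objective
    best = driver.get_configuration(manipulator.random())
    yield best  # results for `best` are ready after this yield
    while True:
      batch = []
      for i in range(4):
        cfg = driver.get_configuration(manipulator.random())
        self.yield_nonblocking(cfg)  # request a test without waiting
        batch.append(cfg)
      yield None  # wait for all pending results
      for cfg in batch:
        if objective.lt(cfg, best):
          best = cfg

technique.register(RandomBatch())
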
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/tuningrunmain.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/tuningrunmain.py
deleted file mode 100644
index 9bcf1b5270286ee405373822d3919ae8854a24c3..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/tuningrunmain.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# vim: tabstop=2 shiftwidth=2 softtabstop=2 expandtab autoindent smarttab
-import argparse
-import copy
-import inspect
-import logging
-import math
-import os
-import socket
-import sys
-import time
-import uuid
-from datetime import datetime
-
-from opentuner import resultsdb
-from opentuner.search.driver import SearchDriver
-from opentuner.measurement.driver import MeasurementDriver
-
-log = logging.getLogger(__name__)
-
-argparser = argparse.ArgumentParser(add_help=False)
-argparser.add_argument('--label',
-                       help="name for the TuningRun")
-argparser.add_argument('--print-search-space-size', action='store_true',
-                       help="Print out the estimated size of the search space and exit")
-argparser.add_argument('--database',
-                       help=("database to store tuning results in, see: "
-                             "http://docs.sqlalchemy.org/en/rel_0_8/core/engines.html#database-urls"))
-argparser.add_argument('--print-params','-pp',action='store_true',
-                       help='show parameters of the configuration being tuned')
-
-
-class CleanStop(Exception):
-  pass
-
-
-class LogFormatter(logging.Formatter):
-  def format(self, record):
-    record.relativeCreated /= 1000.0
-    try:
-      # python 2.7
-      return super(LogFormatter, self).format(record)
-    except:
-      # python 2.6
-      return _OldFormatter.format(self, record)
-
-
-_OldFormatter = logging.Formatter
-logging.Formatter = LogFormatter
-
-try:
-  # python 2.7
-  from logging.config import dictConfig
-except:
-  # python 2.6
-  from .utils.dictconfig import dictConfig
-
-the_logging_config = {
-  'version': 1,
-  'disable_existing_loggers': False,
-  'formatters': {'console': {'format': '[%(relativeCreated)6.0fs] '
-                                       '%(levelname)7s %(name)s: '
-                                       '%(message)s'},
-                 'file': {'format': '[%(asctime)-15s] '
-                                    '%(levelname)7s %(name)s: '
-                                    '%(message)s '
-                                    '@%(filename)s:%(lineno)d'}},
-  'handlers': {'console': {'class': 'logging.StreamHandler',
-                           'formatter': 'console',
-                           'level': 'INFO'},
-               'file': {'class': 'logging.FileHandler',
-                        'filename': 'opentuner.log',
-                        'formatter': 'file',
-                        'level': 'WARNING'}},
-  'loggers': {'': {'handlers': ['console', 'file'],
-                   'level': 'INFO',
-                   'propagate': True}}}
-
-
-def init_logging():
-  dictConfig(the_logging_config)
-  global init_logging
-  init_logging = lambda: None
-
-
-class TuningRunMain(object):
-  def __init__(self,
-               measurement_interface,
-               args,
-               search_driver=SearchDriver,
-               measurement_driver=MeasurementDriver):
-    init_logging()
-
-    manipulator = measurement_interface.manipulator()
-    if args.print_search_space_size:
-      print "10^{%.2f}" % math.log(manipulator.search_space_size(), 10)
-      sys.exit(0)
-    # show internal parameter representation
-    if args.print_params:
-      cfg = manipulator.seed_config()
-      d = manipulator.parameters_dict(cfg)
-      params_dict = {}
-      for k in d: 
-        cls = d[k].__class__.__name__
-        p = (k, d[k].search_space_size())
-        if cls in params_dict:
-          params_dict[cls].append(p)
-        else:
-          params_dict[cls] = [p]
-      for k in params_dict:
-        print k, params_dict[k]
-        print
-      sys.exit(0)
-
-    input_manager = measurement_interface.input_manager()
-    objective = measurement_interface.objective()
-
-    if not args.database:
-      #args.database = 'sqlite://' #in memory
-      if not os.path.isdir('opentuner.db'):
-        os.mkdir('opentuner.db')
-      args.database = 'sqlite:///' + os.path.join('opentuner.db',
-                                                  socket.gethostname() + '.db')
-
-    if '://' not in args.database:
-      args.database = 'sqlite:///' + args.database
-
-    if not args.label:
-      args.label = 'unnamed'
-
-    #self.fake_commit = ('sqlite' in args.database)
-    self.fake_commit = True
-
-    self.args = args
-
-    self.engine, self.Session = resultsdb.connect(args.database)
-    self.session = self.Session()
-    self.tuning_run = None
-    self.search_driver_cls = search_driver
-    self.measurement_driver_cls = measurement_driver
-    self.measurement_interface = measurement_interface
-    self.input_manager = input_manager
-    self.manipulator = manipulator
-    self.objective = objective
-    self.objective_copy = copy.copy(objective)
-    self.last_commit_time = time.time()
-
-  def init(self):
-    if self.tuning_run is None:
-      program_version = (self.measurement_interface
-                         .db_program_version(self.session))
-      self.session.flush()
-      self.measurement_interface.prefix_hook(self.session)
-      self.tuning_run = (
-        resultsdb.models.TuningRun(
-          uuid=uuid.uuid4().hex,
-          name=self.args.label,
-          args=self.args,
-          start_date=datetime.now(),
-          program_version=program_version,
-          objective=self.objective_copy,
-        ))
-      self.session.add(self.tuning_run)
-
-      driver_kwargs = {
-        'args': self.args,
-        'input_manager': self.input_manager,
-        'manipulator': self.manipulator,
-        'measurement_interface': self.measurement_interface,
-        'objective': self.objective,
-        'session': self.session,
-        'tuning_run_main': self,
-        'tuning_run': self.tuning_run,
-        'extra_seeds': self.measurement_interface.seed_configurations(),
-        'extra_criteria': self.measurement_interface.extra_convergence_criteria
-      }
-
-      self.search_driver = self.search_driver_cls(**driver_kwargs)
-
-      self.measurement_driver = self.measurement_driver_cls(**driver_kwargs)
-      self.measurement_interface.set_driver(self.measurement_driver)
-      self.input_manager.set_driver(self.measurement_driver)
-
-      self.tuning_run.machine_class = self.measurement_driver.get_machine_class()
-      self.tuning_run.input_class = self.input_manager.get_input_class()
-
-  def commit(self, force=False):
-    if (force or not self.fake_commit or
-            time.time() - self.last_commit_time > 30):
-      self.session.commit()
-      self.last_commit_time = time.time()
-    else:
-      self.session.flush()
-
-  def main(self):
-    self.init()
-    try:
-      self.tuning_run.state = 'RUNNING'
-      self.commit(force=True)
-      self.search_driver.main()
-      if self.search_driver.best_result:
-        self.measurement_interface.save_final_config(
-            self.search_driver.best_result.configuration)
-      self.tuning_run.final_config = self.search_driver.best_result.configuration
-      self.tuning_run.state = 'COMPLETE'
-    except:
-      self.tuning_run.state = 'ABORTED'
-      raise
-    finally:
-      self.tuning_run.end_date = datetime.now()
-      self.commit(force=True)
-      self.session.close()
-
-  def results_wait(self, generation):
-    """called by search_driver to wait for results"""
-    #single process version:
-    self.measurement_interface.pre_process()  
-    self.measurement_driver.process_all()
-    self.measurement_interface.post_process()
-
-def main(interface, args, *pargs, **kwargs):
-  if inspect.isclass(interface):
-    interface = interface(args=args, *pargs, **kwargs)
-  return TuningRunMain(interface, args).main()
-
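
For context, a measurement interface is typically handed to main() above; a minimal driver sketch follows (the interface class, a subclass of opentuner's MeasurementInterface, is assumed to be defined elsewhere, and a real run would also pull in the search and measurement argparsers as parents):

import argparse
from opentuner import tuningrunmain

def run_tuning(interface_cls):
  # interface_cls is the hypothetical MeasurementInterface subclass
  parser = argparse.ArgumentParser(parents=[tuningrunmain.argparser])
  args = parser.parse_args()
  return tuningrunmain.main(interface_cls, args)
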
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/adddeps.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/adddeps.py
deleted file mode 100644
index e2fc74064b605e92367907a7641442df0cf97cd9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/adddeps.py
+++ /dev/null
@@ -1,13 +0,0 @@
-
-import sys
-from os.path import normpath, realpath, dirname, join, isfile
-
-project_root = normpath(join(dirname(realpath(__file__)), '../..'))
-
-if 'venv' not in ','.join(sys.path):
-  venv_activate = join(project_root, 'venv/bin/activate_this.py')
-  if isfile(venv_activate):
-    execfile(venv_activate, dict(__file__=venv_activate))
-
-sys.path.insert(0, project_root)
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/compactdb.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/compactdb.py
deleted file mode 100755
index 25a70d2d3b2658e877aa51a1462d5a9366635057..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/compactdb.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-if __name__ == '__main__':
-  import adddeps
-
-import argparse
-import logging
-import sys
-
-import opentuner
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger('opentuner.utils.compactdb')
-
-argparser = argparse.ArgumentParser()
-argparser.add_argument('database')
-argparser.add_argument('--level', type=int, default=2)
-
-
-def main(args):
-  if '://' not in args.database:
-    args.database = "sqlite:///" + args.database
-  engine, Session = opentuner.resultsdb.connect(args.database)
-  session = Session()
-
-  config_count = session.query(Configuration).count()
-  # result_count = session.query(Result).count()
-  # desired_result_count = session.query(DesiredResult).count()
-
-  if args.level >= 1:
-    q = (session.query(Configuration)
-         .filter(~Configuration.id.in_(session.query(Result.configuration_id)
-                                       .filter_by(was_new_best=True)
-                                       .subquery()))
-         .filter(Configuration.data != None))
-
-    log.info("%s: compacted %d of %d Configurations",
-             args.database,
-             q.update({'data': None}, False),
-             config_count)
-    session.commit()
-
-  if args.level >= 2:
-    session.execute('VACUUM;')
-    session.commit()
-
-  log.info('done')
-
-
-if __name__ == '__main__':
-  opentuner.tuningrunmain.init_logging()
-  sys.exit(main(argparser.parse_args()))
-
-
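
Run standalone, the script takes a database path plus an optional --level; the same entry point can also be driven programmatically, as in this sketch (hypothetical database path):

from opentuner.utils import compactdb

# level 1 nulls out Configuration data for non-best results;
# level 2 additionally VACUUMs the sqlite file
args = compactdb.argparser.parse_args(['opentuner.db/myhost.db', '--level', '2'])
compactdb.main(args)
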
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/dictconfig.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/dictconfig.py
deleted file mode 100644
index 7b835a41084d1c24f40002e93940c574b60bb696..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/dictconfig.py
+++ /dev/null
@@ -1,544 +0,0 @@
-# This is a copy of the Python logging.config.dictconfig module,
-# reproduced with permission. It is provided here for backwards
-# compatibility for Python versions prior to 2.7.
-#
-# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted,
-# provided that the above copyright notice appear in all copies and that
-# both that copyright notice and this permission notice appear in
-# supporting documentation, and that the name of Vinay Sajip
-# not be used in advertising or publicity pertaining to distribution
-# of the software without specific, written prior permission.
-# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
-# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
-# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
-# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import logging.handlers
-import re
-import sys
-import types
-
-
-IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
-
-def valid_ident(s):
-    m = IDENTIFIER.match(s)
-    if not m:
-        raise ValueError('Not a valid Python identifier: %r' % s)
-    return True
-
-#
-# This function is defined in logging only in recent versions of Python
-#
-try:
-    from logging import _checkLevel
-except ImportError:
-    def _checkLevel(level):
-        if isinstance(level, int):
-            rv = level
-        elif str(level) == level:
-            if level not in logging._levelNames:
-                raise ValueError('Unknown level: %r' % level)
-            rv = logging._levelNames[level]
-        else:
-            raise TypeError('Level not an integer or a '
-                            'valid string: %r' % level)
-        return rv
-
-# The ConvertingXXX classes are wrappers around standard Python containers,
-# and they serve to convert any suitable values in the container. The
-# conversion converts base dicts, lists and tuples to their wrapped
-# equivalents, whereas strings which match a conversion format are converted
-# appropriately.
-#
-# Each wrapper should have a configurator attribute holding the actual
-# configurator to use for conversion.
-
-class ConvertingDict(dict):
-    """A converting dictionary wrapper."""
-
-    def __getitem__(self, key):
-        value = dict.__getitem__(self, key)
-        result = self.configurator.convert(value)
-        #If the converted value is different, save for next time
-        if value is not result:
-            self[key] = result
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-                result.key = key
-        return result
-
-    def get(self, key, default=None):
-        value = dict.get(self, key, default)
-        result = self.configurator.convert(value)
-        #If the converted value is different, save for next time
-        if value is not result:
-            self[key] = result
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-                result.key = key
-        return result
-
-    def pop(self, key, default=None):
-        value = dict.pop(self, key, default)
-        result = self.configurator.convert(value)
-        if value is not result:
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-                result.key = key
-        return result
-
-class ConvertingList(list):
-    """A converting list wrapper."""
-    def __getitem__(self, key):
-        value = list.__getitem__(self, key)
-        result = self.configurator.convert(value)
-        #If the converted value is different, save for next time
-        if value is not result:
-            self[key] = result
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-                result.key = key
-        return result
-
-    def pop(self, idx=-1):
-        value = list.pop(self, idx)
-        result = self.configurator.convert(value)
-        if value is not result:
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-        return result
-
-class ConvertingTuple(tuple):
-    """A converting tuple wrapper."""
-    def __getitem__(self, key):
-        value = tuple.__getitem__(self, key)
-        result = self.configurator.convert(value)
-        if value is not result:
-            if type(result) in (ConvertingDict, ConvertingList,
-                                ConvertingTuple):
-                result.parent = self
-                result.key = key
-        return result
-
-class BaseConfigurator(object):
-    """
-    The configurator base class which defines some useful defaults.
-    """
-
-    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
-
-    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
-    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
-    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
-    DIGIT_PATTERN = re.compile(r'^\d+$')
-
-    value_converters = {
-        'ext' : 'ext_convert',
-        'cfg' : 'cfg_convert',
-    }
-
-    # We might want to use a different one, e.g. importlib
-    importer = __import__
-
-    def __init__(self, config):
-        self.config = ConvertingDict(config)
-        self.config.configurator = self
-
-    def resolve(self, s):
-        """
-        Resolve strings to objects using standard import and attribute
-        syntax.
-        """
-        name = s.split('.')
-        used = name.pop(0)
-        try:
-            found = self.importer(used)
-            for frag in name:
-                used += '.' + frag
-                try:
-                    found = getattr(found, frag)
-                except AttributeError:
-                    self.importer(used)
-                    found = getattr(found, frag)
-            return found
-        except ImportError:
-            e, tb = sys.exc_info()[1:]
-            v = ValueError('Cannot resolve %r: %s' % (s, e))
-            v.__cause__, v.__traceback__ = e, tb
-            raise v
-
-    def ext_convert(self, value):
-        """Default converter for the ext:// protocol."""
-        return self.resolve(value)
-
-    def cfg_convert(self, value):
-        """Default converter for the cfg:// protocol."""
-        rest = value
-        m = self.WORD_PATTERN.match(rest)
-        if m is None:
-            raise ValueError("Unable to convert %r" % value)
-        else:
-            rest = rest[m.end():]
-            d = self.config[m.groups()[0]]
-            #print d, rest
-            while rest:
-                m = self.DOT_PATTERN.match(rest)
-                if m:
-                    d = d[m.groups()[0]]
-                else:
-                    m = self.INDEX_PATTERN.match(rest)
-                    if m:
-                        idx = m.groups()[0]
-                        if not self.DIGIT_PATTERN.match(idx):
-                            d = d[idx]
-                        else:
-                            try:
-                                n = int(idx) # try as number first (most likely)
-                                d = d[n]
-                            except TypeError:
-                                d = d[idx]
-                if m:
-                    rest = rest[m.end():]
-                else:
-                    raise ValueError('Unable to convert '
-                                     '%r at %r' % (value, rest))
-        #rest should be empty
-        return d
-
-    def convert(self, value):
-        """
-        Convert values to an appropriate type. dicts, lists and tuples are
-        replaced by their converting alternatives. Strings are checked to
-        see if they have a conversion format and are converted if they do.
-        """
-        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
-            value = ConvertingDict(value)
-            value.configurator = self
-        elif not isinstance(value, ConvertingList) and isinstance(value, list):
-            value = ConvertingList(value)
-            value.configurator = self
-        elif not isinstance(value, ConvertingTuple) and\
-                 isinstance(value, tuple):
-            value = ConvertingTuple(value)
-            value.configurator = self
-        return value
-
-    def configure_custom(self, config):
-        """Configure an object with a user-supplied factory."""
-        c = config.pop('()')
-        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
-            c = self.resolve(c)
-        props = config.pop('.', None)
-        # Check for valid identifiers
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
-        result = c(**kwargs)
-        if props:
-            for name, value in props.items():
-                setattr(result, name, value)
-        return result
-
-    def as_tuple(self, value):
-        """Utility function which converts lists to tuples."""
-        if isinstance(value, list):
-            value = tuple(value)
-        return value
-
-class DictConfigurator(BaseConfigurator):
-    """
-    Configure logging using a dictionary-like object to describe the
-    configuration.
-    """
-
-    def configure(self):
-        """Do the configuration."""
-
-        config = self.config
-        if 'version' not in config:
-            raise ValueError("dictionary doesn't specify a version")
-        if config['version'] != 1:
-            raise ValueError("Unsupported version: %s" % config['version'])
-        incremental = config.pop('incremental', False)
-        EMPTY_DICT = {}
-        logging._acquireLock()
-        try:
-            if incremental:
-                handlers = config.get('handlers', EMPTY_DICT)
-                # incremental handler config only if handler name
-                # ties in to logging._handlers (Python 2.7)
-                if sys.version_info[:2] == (2, 7):
-                    for name in handlers:
-                        if name not in logging._handlers:
-                            raise ValueError('No handler found with '
-                                             'name %r'  % name)
-                        else:
-                            try:
-                                handler = logging._handlers[name]
-                                handler_config = handlers[name]
-                                level = handler_config.get('level', None)
-                                if level:
-                                    handler.setLevel(_checkLevel(level))
-                            except StandardError as e:
-                                raise ValueError('Unable to configure handler '
-                                                 '%r: %s' % (name, e))
-                loggers = config.get('loggers', EMPTY_DICT)
-                for name in loggers:
-                    try:
-                        self.configure_logger(name, loggers[name], True)
-                    except StandardError as e:
-                        raise ValueError('Unable to configure logger '
-                                         '%r: %s' % (name, e))
-                root = config.get('root', None)
-                if root:
-                    try:
-                        self.configure_root(root, True)
-                    except StandardError as e:
-                        raise ValueError('Unable to configure root '
-                                         'logger: %s' % e)
-            else:
-                disable_existing = config.pop('disable_existing_loggers', True)
-
-                logging._handlers.clear()
-                del logging._handlerList[:]
-
-                # Do formatters first - they don't refer to anything else
-                formatters = config.get('formatters', EMPTY_DICT)
-                for name in formatters:
-                    try:
-                        formatters[name] = self.configure_formatter(
-                                                            formatters[name])
-                    except StandardError as e:
-                        raise ValueError('Unable to configure '
-                                         'formatter %r: %s' % (name, e))
-                # Next, do filters - they don't refer to anything else, either
-                filters = config.get('filters', EMPTY_DICT)
-                for name in filters:
-                    try:
-                        filters[name] = self.configure_filter(filters[name])
-                    except StandardError as e:
-                        raise ValueError('Unable to configure '
-                                         'filter %r: %s' % (name, e))
-
-                # Next, do handlers - they refer to formatters and filters
-                # As handlers can refer to other handlers, sort the keys
-                # to allow a deterministic order of configuration
-                handlers = config.get('handlers', EMPTY_DICT)
-                for name in sorted(handlers):
-                    try:
-                        handler = self.configure_handler(handlers[name])
-                        handler.name = name
-                        handlers[name] = handler
-                    except StandardError as e:
-                        raise ValueError('Unable to configure handler '
-                                         '%r: %s' % (name, e))
-                # Next, do loggers - they refer to handlers and filters
-
-                #we don't want to lose the existing loggers,
-                #since other threads may have pointers to them.
-                #existing is set to contain all existing loggers,
-                #and as we go through the new configuration we
-                #remove any which are configured. At the end,
-                #what's left in existing is the set of loggers
-                #which were in the previous configuration but
-                #which are not in the new configuration.
-                root = logging.root
-                existing = list(root.manager.loggerDict)
-                #The list needs to be sorted so that we can
-                #avoid disabling child loggers of explicitly
-                #named loggers. With a sorted list it is easier
-                #to find the child loggers.
-                existing.sort()
-                #We'll keep the list of existing loggers
-                #which are children of named loggers here...
-                child_loggers = []
-                #now set up the new ones...
-                loggers = config.get('loggers', EMPTY_DICT)
-                for name in loggers:
-                    if name in existing:
-                        i = existing.index(name)
-                        prefixed = name + "."
-                        pflen = len(prefixed)
-                        num_existing = len(existing)
-                        i = i + 1 # look at the entry after name
-                        while (i < num_existing) and\
-                              (existing[i][:pflen] == prefixed):
-                            child_loggers.append(existing[i])
-                            i = i + 1
-                        existing.remove(name)
-                    try:
-                        self.configure_logger(name, loggers[name])
-                    except StandardError as e:
-                        raise ValueError('Unable to configure logger '
-                                         '%r: %s' % (name, e))
-
-                #Disable any old loggers. There's no point deleting
-                #them as other threads may continue to hold references
-                #and by disabling them, you stop them doing any logging.
-                #However, don't disable children of named loggers, as that's
-                #probably not what was intended by the user.
-                for log in existing:
-                    logger = root.manager.loggerDict[log]
-                    if log in child_loggers:
-                        logger.level = logging.NOTSET
-                        logger.handlers = []
-                        logger.propagate = True
-                    elif disable_existing:
-                        logger.disabled = True
-
-                # And finally, do the root logger
-                root = config.get('root', None)
-                if root:
-                    try:
-                        self.configure_root(root)
-                    except StandardError as e:
-                        raise ValueError('Unable to configure root '
-                                         'logger: %s' % e)
-        finally:
-            logging._releaseLock()
-
-    def configure_formatter(self, config):
-        """Configure a formatter from a dictionary."""
-        if '()' in config:
-            factory = config['()'] # for use in exception handler
-            try:
-                result = self.configure_custom(config)
-            except TypeError as te:
-                if "'format'" not in str(te):
-                    raise
-                #Name of parameter changed from fmt to format.
-                #Retry with old name.
-                #This is so that code can be used with older Python versions
-                #(e.g. by Django)
-                config['fmt'] = config.pop('format')
-                config['()'] = factory
-                result = self.configure_custom(config)
-        else:
-            fmt = config.get('format', None)
-            dfmt = config.get('datefmt', None)
-            result = logging.Formatter(fmt, dfmt)
-        return result
-
-    def configure_filter(self, config):
-        """Configure a filter from a dictionary."""
-        if '()' in config:
-            result = self.configure_custom(config)
-        else:
-            name = config.get('name', '')
-            result = logging.Filter(name)
-        return result
-
-    def add_filters(self, filterer, filters):
-        """Add filters to a filterer from a list of names."""
-        for f in filters:
-            try:
-                filterer.addFilter(self.config['filters'][f])
-            except StandardError as e:
-                raise ValueError('Unable to add filter %r: %s' % (f, e))
-
-    def configure_handler(self, config):
-        """Configure a handler from a dictionary."""
-        formatter = config.pop('formatter', None)
-        if formatter:
-            try:
-                formatter = self.config['formatters'][formatter]
-            except StandardError as e:
-                raise ValueError('Unable to set formatter '
-                                 '%r: %s' % (formatter, e))
-        level = config.pop('level', None)
-        filters = config.pop('filters', None)
-        if '()' in config:
-            c = config.pop('()')
-            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
-                c = self.resolve(c)
-            factory = c
-        else:
-            klass = self.resolve(config.pop('class'))
-            #Special case for handler which refers to another handler
-            if issubclass(klass, logging.handlers.MemoryHandler) and\
-                'target' in config:
-                try:
-                    config['target'] = self.config['handlers'][config['target']]
-                except StandardError as e:
-                    raise ValueError('Unable to set target handler '
-                                     '%r: %s' % (config['target'], e))
-            elif issubclass(klass, logging.handlers.SMTPHandler) and\
-                'mailhost' in config:
-                config['mailhost'] = self.as_tuple(config['mailhost'])
-            elif issubclass(klass, logging.handlers.SysLogHandler) and\
-                'address' in config:
-                config['address'] = self.as_tuple(config['address'])
-            factory = klass
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
-        try:
-            result = factory(**kwargs)
-        except TypeError as te:
-            if "'stream'" not in str(te):
-                raise
-            #The argument name changed from strm to stream
-            #Retry with old name.
-            #This is so that code can be used with older Python versions
-            #(e.g. by Django)
-            kwargs['strm'] = kwargs.pop('stream')
-            result = factory(**kwargs)
-        if formatter:
-            result.setFormatter(formatter)
-        if level is not None:
-            result.setLevel(_checkLevel(level))
-        if filters:
-            self.add_filters(result, filters)
-        return result
-
-    def add_handlers(self, logger, handlers):
-        """Add handlers to a logger from a list of names."""
-        for h in handlers:
-            try:
-                logger.addHandler(self.config['handlers'][h])
-            except StandardError as e:
-                raise ValueError('Unable to add handler %r: %s' % (h, e))
-
-    def common_logger_config(self, logger, config, incremental=False):
-        """
-        Perform configuration which is common to root and non-root loggers.
-        """
-        level = config.get('level', None)
-        if level is not None:
-            logger.setLevel(_checkLevel(level))
-        if not incremental:
-            #Remove any existing handlers
-            for h in logger.handlers[:]:
-                logger.removeHandler(h)
-            handlers = config.get('handlers', None)
-            if handlers:
-                self.add_handlers(logger, handlers)
-            filters = config.get('filters', None)
-            if filters:
-                self.add_filters(logger, filters)
-
-    def configure_logger(self, name, config, incremental=False):
-        """Configure a non-root logger from a dictionary."""
-        logger = logging.getLogger(name)
-        self.common_logger_config(logger, config, incremental)
-        propagate = config.get('propagate', None)
-        if propagate is not None:
-            logger.propagate = propagate
-
-    def configure_root(self, config, incremental=False):
-        """Configure a root logger from a dictionary."""
-        root = logging.getLogger()
-        self.common_logger_config(root, config, incremental)
-
-dictConfigClass = DictConfigurator
-
-def dictConfig(config):
-    """Configure logging using a dictionary."""
-    dictConfigClass(config).configure()
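
The backported dictConfig is driven like the stdlib logging.config.dictConfig; a minimal configuration dict (illustrative formatter and handler names) looks like:

from opentuner.utils.dictconfig import dictConfig

dictConfig({
    'version': 1,  # required; only version 1 is supported
    'formatters': {'brief': {'format': '%(levelname)s %(message)s'}},
    'handlers': {'console': {'class': 'logging.StreamHandler',
                             'formatter': 'brief',
                             'level': 'INFO'}},
    'root': {'handlers': ['console'], 'level': 'INFO'},
})
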
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats.py
deleted file mode 100755
index 99449c8a900a3f8ad53c6c12fbbc4d2197b1cb45..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats.py
+++ /dev/null
@@ -1,468 +0,0 @@
-#!/usr/bin/env python
-
-if __name__ == '__main__':
-  import adddeps
-
-import argparse
-import csv
-import hashlib
-import itertools
-import logging
-import math
-import os
-import sqlalchemy.orm.exc
-import subprocess
-import sys
-
-from collections import defaultdict
-from fn import _
-from fn import Stream
-from fn.iters import repeat
-from pprint import pprint
-
-import opentuner
-from opentuner import resultsdb
-from opentuner.resultsdb.models import *
-
-log = logging.getLogger('opentuner.utils.stats')
-
-argparser = argparse.ArgumentParser()
-argparser.add_argument('--label')
-argparser.add_argument('--stats', action='store_true',
-                       help="run in stats mode")
-argparser.add_argument('--by-request-count', action='store_true',
-                       help='report stats by request count')
-argparser.add_argument('--stats-quanta', type=float, default=10,
-                       help="step size in seconds for binning with --stats")
-argparser.add_argument('--stats-dir', default='stats',
-                       help="directory to output --stats to")
-argparser.add_argument('--stats-input', default="opentuner.db")
-argparser.add_argument('--min-runs', type=int, default=1,
-                       help="ignore series with fewer than N runs")
-
-PCTSTEPS = map(_/20.0, xrange(21))
-
-def mean(vals):
-  n = 0.0
-  d = 0.0
-  for v in vals:
-    if v is not None:
-      n += v
-      d += 1.0
-  if d == 0.0:
-    return None
-  return n/d
-
-def median(vals):
-  vals = sorted(vals)
-  a = (len(vals)-1)/2
-  b = (len(vals))/2
-  return (vals[a]+vals[b])/2.0
-
-def percentile(vals, pct):
-  vals = sorted(vals)
-  pos = (len(vals)-1) * pct
-  a = int(math.floor(pos))
-  b = min(len(vals) - 1, a + 1)
-  return (1.0-(pos-a))*vals[a] + (pos-a)*vals[b]
-
-def variance(vals):
-  vals = filter(lambda x: x is not None, vals)
-  avg = mean(vals)
-  if avg is None:
-    return None
-  if avg in (float('inf'), float('-inf')):
-    return avg
-  return mean(map((_ - avg) ** 2, vals))
-
-def stddev(vals):
-  var = variance(vals)
-  if var is None:
-    return None
-  return math.sqrt(var)
-
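
A quick worked example of the helpers above (values chosen for illustration):

vals = [1.0, 2.0, 4.0, 8.0]
print(mean(vals))             # 3.75
print(median(vals))           # 3.0, average of the two middle values
print(percentile(vals, 0.5))  # 3.0, linear interpolation between ranks
print(stddev(vals))           # ~2.68
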
-def hash_args(x):
-  d = dict(vars(x))
-  for k in ('database', 'results_log', 'results_log_details'):
-    d[k] = None
-  return hashlib.sha256(str(sorted(d.items()))).hexdigest()[:20]
-
-def run_label(tr, short = False):
-  techniques = ','.join(tr.args.technique)
-  if not tr.name or tr.name=='unnamed':
-    if short:
-      return techniques
-    else:
-      return "%s_%s" % (techniques, hash_args(tr.args)[:6])
-  else:
-    return tr.name
-
-def run_dir(base, tr):
-  return os.path.join(base,
-                      tr.program.project,
-                      tr.program.name.split('/')[-1],
-                      tr.program_version.version[:16])
-
-class StatsMain(object):
-  def __init__(self, args):
-    self.args = args
-    path = args.stats_input
-    self.dbs = list()
-    for f in os.listdir(path):
-      if 'journal' in f:
-        continue
-      try:
-        e, sm = resultsdb.connect('sqlite:///'+os.path.join(path, f))
-        self.dbs.append(sm())
-      except:
-        log.error('failed to load database: %s', 
-                  os.path.join(path, f),
-                  exc_info=True)
-
-  def main(self):
-    dir_label_runs = defaultdict(lambda: defaultdict(list))
-    for session in self.dbs:
-      q = (session.query(resultsdb.models.TuningRun)
-          .filter_by(state='COMPLETE')
-          .order_by('name'))
-
-      if self.args.label:
-        q = q.filter(TuningRun.name.in_(
-          map(str.strip,self.args.label.split(','))))
-
-      for tr in q:
-        d = run_dir(self.args.stats_dir, tr)
-        d = os.path.normpath(d)
-        dir_label_runs[d][run_label(tr)].append((tr, session))
-
-    summary_report = defaultdict(lambda: defaultdict(list))
-    for d, label_runs in dir_label_runs.iteritems():
-      if not os.path.isdir(d):
-        os.makedirs(d)
-      session = label_runs.values()[0][0][1]
-      objective = label_runs.values()[0][0][0].objective
-      all_run_ids = map(_[0].id, itertools.chain(*label_runs.values()))
-      q = (session.query(Result)
-           .filter(Result.tuning_run_id.in_(all_run_ids))
-           .filter(Result.time < float('inf'))
-           .filter_by(was_new_best=True, state='OK'))
-      total = q.count()
-      if total == 0:
-        continue
-      q = objective.filter_acceptable(q)
-      acceptable = q.count()
-      q = q.order_by(*objective.result_order_by_terms())
-      best = q.limit(1).one()
-      worst = q.offset(acceptable-1).limit(1).one()
-
-      map(len, label_runs.values())
-
-      log.info("%s -- best %.4f / worst %.f4 "
-               "-- %d of %d acceptable -- %d techniques with %d to %d runs",
-               d,
-               best.time,
-               worst.time,
-               acceptable,
-               total,
-               len(label_runs.values()),
-               min(map(len, label_runs.values())),
-               max(map(len, label_runs.values())))
-
-      for label, runs in sorted(label_runs.items()):
-        if len(runs) < self.args.min_runs:
-          print len(runs), self.args.min_runs
-          continue
-        log.debug('%s/%s has %d runs %s',d, label, len(runs), runs[0][0].args.technique)
-        self.combined_stats_over_time(d, label, runs, objective, worst, best)
-
-        final_scores = list()
-        for run, session in runs:
-          try:
-            final = (session.query(Result)
-                    .filter_by(tuning_run=run,
-                               configuration=run.final_config)
-                    .limit(1)
-                    .one())
-          except sqlalchemy.orm.exc.NoResultFound:
-            continue
-          final_scores.append(objective.stats_quality_score(final, worst, best))
-        final_scores.sort()
-        if final_scores:
-          norm = objective.stats_quality_score(best, worst, best)
-          if norm > 0.00001:
-            summary_report[d][run_label(run, short=True)] = (
-                percentile(final_scores, 0.5) / norm,
-                percentile(final_scores, 0.1) / norm,
-                percentile(final_scores, 0.9) / norm,
-              )
-          else:
-            summary_report[d][run_label(run, short=True)] = (
-                percentile(final_scores, 0.5) + norm + 1.0,
-                percentile(final_scores, 0.1) + norm + 1.0,
-                percentile(final_scores, 0.9) + norm + 1.0,
-              )
-
-
-    with open(self.args.stats_dir+ "/summary.dat", 'w') as o:
-      # make summary report
-      keys = sorted(reduce(set.union,
-                           [set(x.keys()) for x in summary_report.values()],
-                           set()))
-      print >>o, '#####',
-      for k in keys:
-        print >>o, k,
-      print >>o
-      for d, label_vals in sorted(summary_report.items()):
-        print >>o, d.split('/')[-2],
-        for k in keys:
-          if k in label_vals:
-            print >>o, '-', label_vals[k][0], label_vals[k][1], label_vals[k][2],
-          else:
-            print >>o, '-', '-', '-', '-',
-        print >>o
-
-    if keys:
-      plotcmd = ["""1 w lines lt 1 lc rgb "black" notitle""",
-                 """'summary.dat' using 3:4:5:xtic(1) ti "%s" """ % keys[0]]
-      for n, k in enumerate(keys[1:]):
-        plotcmd.append("""'' using %d:%d:%d ti "%s" """ % (
-                        4*n + 7,
-                        4*n + 8,
-                        4*n + 9,
-                        k))
-      self.gnuplot_summary_file(self.args.stats_dir, 'summary', plotcmd)
-
-
-
-    for d, label_runs in dir_label_runs.iteritems():
-      labels = [k for k,v in label_runs.iteritems()
-                if len(v)>=self.args.min_runs]
-      self.gnuplot_file(d,
-                        "medianperfe",
-                        ['"%s_percentiles.dat" using 1:12:4:18 with errorbars title "%s"' % (l,l) for l in labels])
-      self.gnuplot_file(d,
-                        "meanperfe",
-                        ['"%s_percentiles.dat" using 1:21:4:18 with errorbars title "%s"' % (l,l) for l in labels])
-      self.gnuplot_file(d,
-                        "medianperfl",
-                        ['"%s_percentiles.dat" using 1:12 with lines title "%s"' % (l,l) for l in labels])
-      self.gnuplot_file(d,
-                        "meanperfl",
-                        ['"%s_percentiles.dat" using 1:21 with lines title "%s"' % (l,l) for l in labels])
-
-      # print
-      # print "10% Scores", d
-      # pprint(self.technique_scores(d, labels, '0.1'))
-      # print
-      # print "90% Scores", d
-      # pprint(self.technique_scores(d, labels, '0.9'))
-      # print
-      # print "Mean Scores", d
-      # pprint(self.technique_scores(d, labels, 'mean'))
-      print
-      print "Median Scores", d
-      pprint(self.technique_scores(d, labels, '0.5'))
-
-
-  def technique_scores(self, directory, labels, ykey, xkey='#sec', factor=10.0):
-    max_duration = None
-    min_value = float('inf')
-    for label in labels:
-      try:
-        dr = csv.DictReader(open(os.path.join(directory,label+"_percentiles.dat")), delimiter=' ', lineterminator='\n')
-        lastrow = list(dr)[-1]
-        max_duration = max(max_duration, float(lastrow[xkey]))
-        min_value = min(min_value, float(lastrow[ykey]))
-      except Exception:
-        log.exception("failed computing score")
-
-    scores = list()
-
-    for label in labels:
-      try:
-        dr = csv.DictReader(open(os.path.join(directory,label+"_percentiles.dat")), delimiter=' ', lineterminator='\n')
-        score = 0.0
-        lastsec = 0.0
-        value = float('inf')
-        for row in dr:
-          duration = float(row[xkey]) - lastsec
-          lastsec = float(row[xkey])
-          value = float(row[ykey])
-          score += duration * (value - min_value)
-        score += (factor*max_duration - lastsec) * (value - min_value)
-        scores.append((score, label))
-      except Exception:
-        log.exception("failed computing score")
-
-    return sorted(scores)
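-  # Worked example (all numbers hypothetical): for one label whose
-  # _percentiles.dat rows are (#sec, value) = (10, 5.0) and (20, 3.0), with
-  # min_value == 3.0 across labels, max_duration == 20 and factor == 10:
-  #   score = 10*(5.0 - 3.0)               # first row: area above the best value
-  #         + 10*(3.0 - 3.0)               # second row
-  #         + (10*20 - 20)*(3.0 - 3.0)     # tail out to factor*max_duration
-  #         = 20.0
-  # Lower scores are better: they reward reaching the best value early and
-  # holding it.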
-
-
-  def combined_stats_over_time(self,
-                               output_dir,
-                               label,
-                               runs,
-                               objective,
-                               worst,
-                               best,
-                               ):
-    """
-    combine stats_over_time() vectors for multiple runs
-    """
-
-    #extract_fn = lambda dr: objective.stats_quality_score(dr.result, worst, best)
-    extract_fn = _.result.time
-    combine_fn = min
-    no_data = 999
-
-    log.debug("writing stats for %s to %s", label, output_dir)
-    by_run = [self.stats_over_time(session, run, extract_fn, combine_fn, no_data)
-              for run, session in runs]
-    max_len = max(map(len, by_run))
-
-    by_run_streams = [Stream() << x << repeat(x[-1], max_len-len(x))
-                      for x in by_run]
-    by_quanta = zip(*by_run_streams[:])
-
-    def data_file(suffix, headers, value_function):
-      with open(os.path.join(output_dir, label+suffix), 'w') as fd:
-        out = csv.writer(fd, delimiter=' ', lineterminator='\n')
-        out.writerow(['#sec'] + headers)
-        for quanta, values in enumerate(by_quanta):
-          sec = quanta*self.args.stats_quanta
-          out.writerow([sec] + value_function(values))
-
-   #data_file('_details.dat',
-   #          map(lambda x: 'run%d'%x, xrange(max_len)),
-   #          list)
-   #self.gnuplot_file(output_dir,
-   #                  label+'_details',
-   #                  [('"'+label+'_details.dat"'
-   #                    ' using 1:%d'%i +
-   #                    ' with lines'
-   #                    ' title "Run %d"'%i)
-   #                   for i in xrange(max_len)])
-
-    data_file('_mean.dat',
-              ['mean', 'stddev'],
-              lambda values: [mean(values), stddev(values)])
-    self.gnuplot_file(output_dir,
-                      label+'_mean',
-                      ['"'+label+'_mean.dat" using 1:2 with lines title "Mean"'])
-
-    def extract_percentiles(values):
-      values = sorted(values)
-      return ([values[int(round(p*(len(values)-1)))] for p in PCTSTEPS]
-             + [mean(values)])
-    data_file("_percentiles.dat", PCTSTEPS + ['mean'], extract_percentiles)
-    self.gnuplot_file(output_dir,
-                      label+'_percentiles',
-                      reversed([
-                        '"'+label+'_percentiles.dat" using 1:2  with lines title "0%"',
-                      # '""                          using 1:3  with lines title "5%"',
-                        '""                          using 1:4  with lines title "10%"',
-                      # '""                          using 1:5  with lines title "15%"',
-                        '""                          using 1:6  with lines title "20%"',
-                      # '""                          using 1:7  with lines title "25%"',
-                        '""                          using 1:8  with lines title "30%"',
-                      # '""                          using 1:9  with lines title "35%"',
-                        '""                          using 1:10 with lines title "40%"',
-                      # '""                          using 1:11 with lines title "45%"',
-                        '""                          using 1:12 with lines title "50%"',
-                      # '""                          using 1:13 with lines title "55%"',
-                        '""                          using 1:14 with lines title "60%"',
-                      # '""                          using 1:15 with lines title "65%"',
-                        '""                          using 1:16 with lines title "70%"',
-                      # '""                          using 1:17 with lines title "75%"',
-                        '""                          using 1:18 with lines title "80%"',
-                      # '""                          using 1:19 with lines title "85%"',
-                        '""                          using 1:20 with lines title "90%"',
-                      # '""                          using 1:21 with lines title "95%"',
-                        '"'+label+'_percentiles.dat" using 1:22 with lines title "100%"',
-                       ]))
-
-  def gnuplot_file(self, output_dir, prefix, plotcmd):
-    with open(os.path.join(output_dir, prefix+'.gnuplot'), 'w') as fd:
-      print >>fd, 'set terminal postscript eps enhanced color'
-      print >>fd, 'set output "%s"' % (prefix+'.eps')
-      print >>fd, 'set ylabel "Execution Time (seconds)"'
-      print >>fd, 'set xlabel "Autotuning Time (seconds)"'
-      print >>fd, 'plot', ',\\\n'.join(plotcmd)
-
-    try:
-      subprocess.call(['gnuplot', prefix+'.gnuplot'], cwd=output_dir, stdin=None)
-    except OSError:
-      log.error("command gnuplot not found")
-
-  def gnuplot_summary_file(self, output_dir, prefix, plotcmd):
-    with open(os.path.join(output_dir, prefix+'.gnuplot'), 'w') as fd:
-      print >>fd, 'set terminal postscript eps enhanced color'
-      print >>fd, 'set output "%s"' % (prefix+'.eps')
-      print >>fd, '''
-set boxwidth 0.9
-set style fill solid 1.00 border 0
-set style histogram errorbars gap 2 lw 1
-set style data histograms
-set xtics rotate by -45
-set bars 0.5
-set yrange [0:10]
-set key out vert top left
-set size 1.5,1
-set ytics 1
-
-'''
-      print >>fd, 'plot', ',\\\n'.join(plotcmd)
-    subprocess.call(['gnuplot', prefix+'.gnuplot'], cwd=output_dir, stdin=None)
-
-
-  def stats_over_time(self,
-                      session,
-                      run,
-                      extract_fn,
-                      combine_fn,
-                      no_data = None):
-    """
-    return reduce(combine_fn, map(extract_fn, data)) for each quanta of the
-    tuning run
-    """
-    value_by_quanta = [ no_data ]
-    start_date = run.start_date
-
-    subq = (session.query(Result.id)
-           .filter_by(tuning_run = run, was_new_best = True, state='OK'))
-
-    q = (session.query(DesiredResult)
-         .join(Result)
-         .filter(DesiredResult.state=='COMPLETE',
-                 DesiredResult.tuning_run == run,
-                 DesiredResult.result_id.in_(subq.subquery()))
-         .order_by(DesiredResult.request_date))
-
-    first_id = None
-    for dr in q:
-      if first_id is None:
-        first_id = dr.id
-      td = (dr.request_date - start_date)
-      duration = td.seconds + (td.days * 24 * 3600.0)
-      if self.args.by_request_count:
-        quanta = dr.id - first_id
-      else:
-        quanta = int(duration / self.args.stats_quanta)
-      while len(value_by_quanta) <= quanta:
-        value_by_quanta.append(value_by_quanta[-1])
-
-      if value_by_quanta[-1] is no_data:
-        value_by_quanta[-1] = extract_fn(dr)
-      else:
-        value_by_quanta[-1] = combine_fn(value_by_quanta[-1], extract_fn(dr))
-
-    return value_by_quanta
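-  # Quantization sketch (hypothetical numbers): with stats_quanta == 10,
-  # combine_fn == min, and new-best results at t = 3s (value 5.0) and
-  # t = 27s (value 4.0), the quanta are 0 and 2, so this returns
-  # [5.0, 5.0, 4.0]; each slot carries the previous value forward until a
-  # better one arrives.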
-
-
-
-
-
-if __name__ == '__main__':
-  opentuner.tuningrunmain.init_logging()
-  sys.exit(StatsMain(argparser.parse_args()).main())
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats_matplotlib.py b/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats_matplotlib.py
deleted file mode 100644
index 54e9132a662fa68089ce3d0ba00cb6502bd2c712..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/opentuner/utils/stats_matplotlib.py
+++ /dev/null
@@ -1,290 +0,0 @@
-#!/usr/bin/python
-
-if __name__ == '__main__':
-  import adddeps
-
-import itertools
-import math
-import matplotlib.pyplot as plt
-import numpy
-import os
-import sqlalchemy
-import sqlalchemy.orm.exc
-
-from collections import defaultdict
-from fn import _
-from fn import Stream
-from fn.iters import repeat
-from opentuner import resultsdb
-
-PCTSTEPS = map(_/20.0, xrange(21))
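-# PCTSTEPS evaluates to [0.0, 0.05, 0.10, ..., 1.0]: the 21 percentile levels
-# reported in 5% steps.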
-
-
-def mean(vals):
-  """
-  Arguments,
-    vals: List of floating point numbers
-  Returns,
-    The mean of the numbers in the input list
-    None if all values in the list are None
-  """
-  filtered_values = [float(x) for x in vals if x is not None]
-  if len(filtered_values) == 0:
-    return None
-  return numpy.mean(numpy.array(filtered_values))
-
-
-def stddev(vals):
-  """
-  Arguments,
-    vals: List of floating point numbers
-  Returns,
-    The standard deviation of numbers in the input list
-    None if all values in the list are None
-  """
-  filtered_values = [float(x) for x in vals if x is not None]
-  if len(filtered_values) == 0:
-    return None
-  return math.sqrt(numpy.var(numpy.array(filtered_values)))
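-# Minimal sketches of the intended behaviour (inputs are illustrative):
-#   mean([1.0, 2.0, None, 3.0])  ->  2.0   (None entries are ignored)
-#   mean([None, None])           ->  None
-#   stddev([1.0, 3.0])           ->  1.0   (population standard deviation)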
-
-
-def get_dbs(path, db_type='sqlite:///'):
-  """
-  Arguments,
-    path: Path of directory containing .db files
-  Returns,
-    A list of (engine, session) pairs to the dbs pointed to by
-    the db files
-  """
-  dbs = list()
-  for f in os.listdir(path):
-    if 'journal' in f:
-      continue
-    try:
-      db_path = os.path.join(path, f)
-      engine, sm = resultsdb.connect(db_type + db_path)
-      dbs.append(sm())
-    except Exception as e:
-      print e
-      print "Error encountered while connecting to db"
-  return dbs
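-# Hypothetical usage: open one session per .db file in the current working
-# directory (journal files are skipped above), then query through each:
-#   for session in get_dbs(os.getcwd()):
-#       runs = session.query(resultsdb.models.TuningRun).all()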
-
-
-def matplotlibplot_file(labels, xlim = None, ylim = None, disp_types=['median']):
-  """
-  Arguments,
-    labels: List of labels that need to be included in the plot
-    xlim: Integer denoting the maximum X-coordinate in the plot
-    ylim: Integer denoting the maximum Y-coordinate in the plot
-    disp_types: List of measures that are to be displayed in the plot
-  Returns,
-    A figure object representing the required plot
-  """
-
-  figure = plt.figure()
-  values = get_values(labels)
-  for label in values:
-    (mean_values, percentile_values) = values[label]
-    for disp_type in disp_types:
-      cols = None
-      data = percentile_values
-
-      if disp_type == 'median':
-        cols = [11]
-      elif disp_type == 'mean':
-        cols = [1]
-        data = mean_values
-      elif disp_type == 'all_percentiles':
-        cols = range(1,22)
-
-      plotted_data = [[] for x in xrange(len(cols))]
-
-      x_indices = []
-      for data_point in data[1:]:
-        x_indices.append(int(data_point[0]))
-        for i in range(0, len(cols)):
-          plotted_data[i].append(float(data_point[cols[i]]))
-      args = []
-      for to_plot in plotted_data:
-        args.append(x_indices)
-        args.append(to_plot)
-
-      plt.plot(*args, label='%s(%s)' % (label, disp_type))
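-      # args alternates x and y sequences, so this single plt.plot call draws
-      # one line per selected column: plot(x, y1, x, y2, ...).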
-
-  if xlim is not None:
-    plt.xlim(xlim)
-  if ylim is not None:
-    plt.ylim(ylim)
-
-  plt.xlabel('Autotuning Time (seconds)')
-  plt.ylabel('Execution Time (seconds)')
-  plt.legend(loc='upper right')
-  return figure
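-# Hypothetical usage, assuming completed runs labelled 'timeouts' and
-# 'always_reorder' exist in the working directory's .db files:
-#   fig = matplotlibplot_file(['timeouts', 'always_reorder'],
-#                             xlim=[0, 5000], ylim=[0, 10])
-#   fig.savefig('comparison.png')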
-
-
-def run_label(tr):
-  techniques = ','.join(tr.args.technique)
-  if not tr.name or tr.name == 'unnamed':
-    return techniques
-  return tr.name
-
-
-def combined_stats_over_time(label,
-                             runs,
-                             objective,
-                             worst,
-                             best,
-                             ):
-  """
-  combine stats_over_time() vectors for multiple runs
-  """
-
-  extract_fn = _.result.time
-  combine_fn = min
-  no_data = 999
-
-  by_run = [stats_over_time(session, run, extract_fn, combine_fn, no_data)
-            for run, session in runs]
-  max_len = max(map(len, by_run))
-
-  by_run_streams = [Stream() << x << repeat(x[-1], max_len-len(x))
-                    for x in by_run]
-  by_quanta = zip(*by_run_streams[:])
-
-  # TODO: Fix this, this variable should be configurable
-  stats_quanta = 10
-  def get_data(value_function):
-    final_values = []
-    for quanta, values in enumerate(by_quanta):
-      sec = quanta*stats_quanta
-      final_values.append([sec] + value_function(values))
-    return final_values
-
-  mean_values = get_data(lambda values: [mean(values), stddev(values)])
-
-  def extract_percentiles(values):
-    values = sorted(values)
-    return ([values[int(round(p*(len(values)-1)))] for p in PCTSTEPS]
-           + [mean(values)])
-  percentile_values = get_data(extract_percentiles)
-  return mean_values, percentile_values
-
-
-def stats_over_time(session,
-                    run,
-                    extract_fn,
-                    combine_fn,
-                    no_data = None):
-  """
-  return reduce(combine_fn, map(extract_fn, data)) for each quanta of the
-  tuning run
-  """
-  value_by_quanta = [ no_data ]
-  start_date = run.start_date
-
-  subq = (session.query(resultsdb.models.Result.id)
-         .filter_by(tuning_run = run, was_new_best = True, state='OK'))
-
-  q = (session.query(resultsdb.models.DesiredResult)
-       .join(resultsdb.models.Result)
-       .filter(resultsdb.models.DesiredResult.state=='COMPLETE',
-               resultsdb.models.DesiredResult.tuning_run == run,
-               resultsdb.models.DesiredResult.result_id.in_(subq.subquery()))
-       .order_by(resultsdb.models.DesiredResult.request_date))
-
-  first_id = None
-  for dr in q:
-    if first_id is None:
-      first_id = dr.id
-    td = (dr.request_date - start_date)
-    duration = td.seconds + (td.days * 24 * 3600.0)
-    # TODO: Make this variable configurable
-    by_request_count = True
-    stats_quanta = 10
-    if by_request_count:
-      quanta = dr.id - first_id
-    else:
-      quanta = int(duration / stats_quanta)
-    while len(value_by_quanta) <= quanta:
-      value_by_quanta.append(value_by_quanta[-1])
-
-    if value_by_quanta[-1] is no_data:
-      value_by_quanta[-1] = extract_fn(dr)
-    else:
-      value_by_quanta[-1] = combine_fn(value_by_quanta[-1], extract_fn(dr))
-
-  return value_by_quanta
-
-
-def get_all_labels():
-  """
-  Returns,
-    List of labels that are in the complete state
-  """
-  dbs = get_dbs(os.getcwd())
-  all_labels = list()
-  for db in dbs:
-    all_labels.extend(db.query(resultsdb.models.TuningRun.name)
-                        .filter_by(state='COMPLETE')
-                        .distinct()
-                        .all())
-  all_labels = [str(element[0]) for element in all_labels]
-  return all_labels
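-# Hypothetical usage, assuming completed tuning runs named 'timeouts' and
-# 'all_options' exist in the working directory's .db files:
-#   get_all_labels()  ->  ['timeouts', 'all_options']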
-
-
-def get_values(labels):
-  """
-  Arguments,
-    labels: List of labels whose values are of interest
-  Returns,
-    A list of (mean, percentile) tuples, corresponding to the
-    provided list of labels
-  """
-  dbs = get_dbs(os.getcwd())
-  dir_label_runs = defaultdict(lambda: defaultdict(list))
-  for db in dbs:
-    q = (db.query(resultsdb.models.TuningRun)
-            .filter_by(state='COMPLETE')
-            .order_by('name'))
-    if labels:
-      q = q.filter(resultsdb.models.TuningRun.name.in_(labels))
-    for tr in q:
-      dir_label_runs[run_label(tr)][run_label(tr)].append((tr, db))
-  all_run_ids = list()
-  returned_values = {}
-  for d, label_runs in dir_label_runs.iteritems():
-    all_run_ids = map(_[0].id, itertools.chain(*label_runs.values()))
-    session = label_runs.values()[0][0][1]
-    objective = label_runs.values()[0][0][0].objective
-
-    q = (session.query(resultsdb.models.Result)
-         .filter(resultsdb.models.Result.tuning_run_id.in_(all_run_ids))
-         .filter(resultsdb.models.Result.time < float('inf'))
-         .filter_by(was_new_best=True, state='OK'))
-    total = q.count()
-    q = objective.filter_acceptable(q)
-    acceptable = q.count()
-    q = q.order_by(*objective.result_order_by_terms())
-    best = q.limit(1).one()
-    worst = q.offset(acceptable - 1).limit(1).one()
-
-    for label, runs in sorted(label_runs.items()):
-      (mean_values, percentile_values) = combined_stats_over_time(label, runs, objective, worst, best)
-      returned_values[label] = (mean_values, percentile_values)
-      final_scores = list()
-      for run, session in runs:
-        try:
-          final = (session.query(resultsdb.models.Result)
-                  .filter_by(tuning_run = run,
-                             configuration = run.final_config)
-                  .limit(1).one())
-        except sqlalchemy.orm.exc.NoResultFound:
-          continue
-        final_scores.append(objective.stats_quality_score(final, worst, best))
-      final_scores.sort()
-  return returned_values
-
-if __name__ == '__main__':
-    labels = [u'timeouts', u'always_reorder', u'add_store_at', u'all_options']
-    get_values(labels)
-    print get_all_labels()
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/optional-requirements.txt b/llvm/projects/hpvm-tensor-rt/opentuner/optional-requirements.txt
deleted file mode 100644
index 9848f674cb6e5ca1faba757abd98eb5066e4688d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/optional-requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-django==1.6.1
-matplotlib==1.1.1
-virtualenv==1.9.1
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/requirements.txt b/llvm/projects/hpvm-tensor-rt/opentuner/requirements.txt
deleted file mode 100644
index fa9cfeca2ede04002798fea0db669de3c87879d4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-argparse>=1.2.1
-fn>=0.2.12
-numpy>=1.8.0
-pysqlite>=2.6.3
-SQLAlchemy>=0.8.2
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/setup.py b/llvm/projects/hpvm-tensor-rt/opentuner/setup.py
deleted file mode 100755
index 633d4359d9e9655b5241521208fecc37bc4ab65f..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/setup.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-try:
-    from setuptools import setup
-except ImportError:
-    try:
-        from setuptools.core import setup
-    except ImportError:
-        from distutils.core import setup
-
-try:
-    from pypandoc import convert
-    read_md = lambda f: convert(f, 'rest')
-except ImportError:
-    print("warning: pypandoc module not found, could not convert Markdown to RST")
-    read_md = lambda f: open(f, 'r').read()
-
-required = open('requirements.txt').read().splitlines()
-required = [l.strip() for l in required
-            if l.strip() and not l.strip().startswith('#')]
-
-setup(
-    name='opentuner',
-    version='0.8.0',
-    url='http://opentuner.org/',
-    license='MIT',
-    author='Jason Ansel',
-    author_email='jansel@jansel.net',
-    description='An extensible framework for program autotuning',
-    long_description=read_md('README.md'),
-    packages=['opentuner', 'opentuner.resultsdb', 'opentuner.utils',
-              'opentuner.measurement', 'opentuner.search'],
-    install_requires=required,
-)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/manage.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/manage.py
deleted file mode 100644
index f27b5b8db13b490f7599856364f59c6fedcbfe6e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/manage.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-
-if __name__ == "__main__":
-    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stats_app.settings")
-
-    from django.core.management import execute_from_command_line
-
-    execute_from_command_line(sys.argv)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/settings.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/settings.py
deleted file mode 100644
index 09505be03e5621e4df952e878b52973da9588ffc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/settings.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Django settings for stats_app project.
-import os
-
-DEBUG = True
-TEMPLATE_DEBUG = DEBUG
-
-ADMINS = (
-    # ('Your Name', 'your_email@example.com'),
-)
-
-MANAGERS = ADMINS
-DIRECTORY_NAME = os.path.dirname(os.path.realpath(__file__))
-
-DATABASES = {
-    'default': {
-        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
-        'NAME': DIRECTORY_NAME + '/db',      # Or path to database file if using sqlite3.
-        # The following settings are not used with sqlite3:
-        'USER': '',
-        'PASSWORD': '',
-        'HOST': '',                      # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
-        'PORT': '',                      # Set to empty string for default.
-    }
-}
-
-# Hosts/domain names that are valid for this site; required if DEBUG is False
-# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
-ALLOWED_HOSTS = []
-
-# Local time zone for this installation. Choices can be found here:
-# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
-# although not all choices may be available on all operating systems.
-# In a Windows environment this must be set to your system time zone.
-TIME_ZONE = 'America/Chicago'
-
-# Language code for this installation. All choices can be found here:
-# http://www.i18nguy.com/unicode/language-identifiers.html
-LANGUAGE_CODE = 'en-us'
-
-SITE_ID = 1
-
-# If you set this to False, Django will make some optimizations so as not
-# to load the internationalization machinery.
-USE_I18N = True
-
-# If you set this to False, Django will not format dates, numbers and
-# calendars according to the current locale.
-USE_L10N = True
-
-# If you set this to False, Django will not use timezone-aware datetimes.
-USE_TZ = True
-
-# Absolute filesystem path to the directory that will hold user-uploaded files.
-# Example: "/var/www/example.com/media/"
-MEDIA_ROOT = ''
-
-# URL that handles the media served from MEDIA_ROOT. Make sure to use a
-# trailing slash.
-# Examples: "http://example.com/media/", "http://media.example.com/"
-MEDIA_URL = ''
-
-# Absolute path to the directory static files should be collected to.
-# Don't put anything in this directory yourself; store your static files
-# in apps' "static/" subdirectories and in STATICFILES_DIRS.
-# Example: "/var/www/example.com/static/"
-STATIC_ROOT = ''
-
-# URL prefix for static files.
-# Example: "http://example.com/static/", "http://static.example.com/"
-STATIC_URL = '/static/'
-
-# Additional locations of static files
-STATICFILES_DIRS = (
-    # Put strings here, like "/home/html/static" or "C:/www/django/static".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-    DIRECTORY_NAME + '/static',
-)
-
-# List of finder classes that know how to find static files in
-# various locations.
-STATICFILES_FINDERS = (
-    'django.contrib.staticfiles.finders.FileSystemFinder',
-    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
-#    'django.contrib.staticfiles.finders.DefaultStorageFinder',
-)
-
-# Make this unique, and don't share it with anybody.
-SECRET_KEY = 't!!j*1gt0(5n%6nj-lirzja-9uj6s86s#0@kp2@8v&x#+c2+c-'
-
-# List of callables that know how to import templates from various sources.
-TEMPLATE_LOADERS = (
-    'django.template.loaders.filesystem.Loader',
-    'django.template.loaders.app_directories.Loader',
-#     'django.template.loaders.eggs.Loader',
-)
-
-MIDDLEWARE_CLASSES = (
-    'django.middleware.common.CommonMiddleware',
-    'django.contrib.sessions.middleware.SessionMiddleware',
-    'django.middleware.csrf.CsrfViewMiddleware',
-    'django.contrib.auth.middleware.AuthenticationMiddleware',
-    'django.contrib.messages.middleware.MessageMiddleware',
-    # Uncomment the next line for simple clickjacking protection:
-    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
-)
-
-ROOT_URLCONF = 'stats_app.urls'
-
-# Python dotted path to the WSGI application used by Django's runserver.
-WSGI_APPLICATION = 'stats_app.wsgi.application'
-
-TEMPLATE_DIRS = (
-    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
-    # Always use forward slashes, even on Windows.
-    # Don't forget to use absolute paths, not relative paths.
-    DIRECTORY_NAME + '/templates',
-)
-
-INSTALLED_APPS = (
-    'django.contrib.auth',
-    'django.contrib.contenttypes',
-    'django.contrib.sessions',
-    'django.contrib.sites',
-    'django.contrib.messages',
-    'django.contrib.staticfiles',
-    # Uncomment the next line to enable the admin:
-    'django.contrib.admin',
-    # Uncomment the next line to enable admin documentation:
-    'django.contrib.admindocs',
-)
-
-SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
-
-# A sample logging configuration. The only tangible logging
-# performed by this configuration is to send an email to
-# the site admins on every HTTP 500 error when DEBUG=False.
-# See http://docs.djangoproject.com/en/dev/topics/logging for
-# more details on how to customize your logging configuration.
-LOGGING = {
-    'version': 1,
-    'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse'
-        }
-    },
-    'handlers': {
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
-        }
-    },
-    'loggers': {
-        'django.request': {
-            'handlers': ['mail_admins'],
-            'level': 'ERROR',
-            'propagate': True,
-        },
-    }
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/static/charts.css b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/static/charts.css
deleted file mode 100644
index e32e2832aeac39540f2fb2c39e3817b6ab85cf3b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/static/charts.css
+++ /dev/null
@@ -1,11 +0,0 @@
-img.center {
-display: block;
-margin-left: auto;
-margin-right: auto;
-
-padding: 8px;
-border: solid;
-border-color: #dddddd #aaaaaa #aaaaaa #dddddd;
-border-width: 1px 2px 2px 1px;
-background-color: white;
-}
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/templates/charts.html b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/templates/charts.html
deleted file mode 100644
index d38bb4c0c6c7c31d9cfce0bbc74d57f7601b3c83..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/templates/charts.html
+++ /dev/null
@@ -1,41 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-  <link rel="stylesheet" type="text/css" href="{{ STATIC_URL }}charts.css" media="screen" />
-  <title>{% block title %}Graph{% endblock %}</title>
-</head>
-
-<body>
-  <div id="graphForm">
-  <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"> </script>
-  <script>
-    function callback() {{
-      var values = $('#graphForm form').serialize();
-      $('#graphForm img').attr("src", "graph.png?" + values);
-    }}
-  </script>
-  <p style="text-align:center">
-  <img src="graph.png" id="graph" />
-  </p>
-  <form method = "GET" action="" style="text-align:center">
-    <h3>X Limits:</h3>
-    <input type="range" name="xlim" min="0" max="10000">
-    <br><h3>Y Limits:</h3>
-    <input type="range" name="ylim" min="0" max="20">
-    <br>
-    <h3>Labels:</h3>
-    {0}
-    <br>
-    <h3>Measure:</h3>
-    <b>Mean:</b>
-    <input type="checkbox" name="disp_type" value="mean">
-    <b>Median:</b>
-    <input type="checkbox" name="disp_type" value="median">
-    <b>All percentiles:</b>
-    <input type="checkbox" name="disp_type" value="all_percentiles">
-    <br>
-    <input type="button" value="Graph!" onclick="callback()">
-  </form>
-  </div>
-</body>
-</html>
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/urls.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/urls.py
deleted file mode 100644
index 15743290ec27ae4f2a4e633e80483d972190c870..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/urls.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from django.conf.urls import patterns, include, url
-
-# Uncomment the next two lines to enable the admin:
-from django.contrib import admin
-import views.charts
-admin.autodiscover()
-
-urlpatterns = patterns('',
-    # Examples:
-    # url(r'^$', 'stats_app.views.home', name='home'),
-    # url(r'^stats_app/', include('stats_app.foo.urls')),
-
-    # Uncomment the admin/doc line below to enable admin documentation:
-    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
-
-    # Uncomment the next line to enable the admin:
-    url(r'^admin/', include(admin.site.urls)),
-    url(r'^graph.png$', views.charts.display_graph, name='graph'),
-    url(r'^$', views.charts.display_full_page, name='graph_page'),
-)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views.py
deleted file mode 100644
index 7cb32b3655aa032c745e97a088ad80365dd9c551..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from django.http import HttpResponse
-
-
-def index(request):
-    return HttpResponse("Hello, world. You're at the stats application index.")
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views/__init__.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views/charts.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views/charts.py
deleted file mode 100644
index c3a2ebff32281967f11640ef16a39353ca501d1a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/views/charts.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import django
-from django.shortcuts import render
-from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
-
-from opentuner.utils import stats_matplotlib as stats
-
-
-def display_graph(request):
-  """
-  Handles request to display graph with provided parameters
-  """
-  request_dict = dict(request.GET.iterlists())
-
-  xlim = request_dict.get('xlim', None)
-  if xlim:
-    xlim = int(xlim[0])
-  else:
-    xlim = 5000
-  xlim = [0, xlim]
-
-  ylim = request_dict.get('ylim', None)
-  if ylim:
-    ylim = int(ylim[0])
-  else:
-    ylim = 10
-  ylim = [0, ylim]
-
-  labels = request_dict.get('labels', None)
-
-  disp_types = request_dict.get('disp_type', None)
-  if not disp_types:
-    disp_types = ['median']
-
-  fig = stats.matplotlibplot_file(labels, xlim=xlim, ylim=ylim, disp_types=disp_types)
-  canvas = FigureCanvas(fig)
-  response = django.http.HttpResponse(content_type='image/png')
-  canvas.print_png(response)
-  return response
-
-
-def display_full_page(request):
-  """
-  Handles request to display the full page
-  """
-  all_labels = stats.get_all_labels()
-  label_list = get_label_list(all_labels)
-  html = render(request, 'charts.html')
-  content = html.content
-  content = content.format(label_list)
-  html.content = content
-  return html
-
-
-def get_label_list(all_labels):
-  """
-  Returns list of html form inputs corresponding to the different
-  labels in the provided db file
-  """
-  label_list = ''
-  for label in all_labels:
-    label_list += '<b>%s</b>:<input type="checkbox" name="labels" value="%s">' % (label, label)
-  return label_list
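-# For all_labels == ['small', 'large'] (illustrative values), the returned
-# string concatenates one checkbox per label:
-#   '<b>small</b>:<input type="checkbox" name="labels" value="small">'
-#   + '<b>large</b>:<input type="checkbox" name="labels" value="large">'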
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/wsgi.py b/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/wsgi.py
deleted file mode 100644
index 90f54d8e3dd53cadeeb3eafa33e1abd734485cd0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/stats_app/stats_app/wsgi.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-WSGI config for stats_app project.
-
-This module contains the WSGI application used by Django's development server
-and any production WSGI deployments. It should expose a module-level variable
-named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
-this application via the ``WSGI_APPLICATION`` setting.
-
-Usually you will have the standard Django WSGI application here, but it also
-might make sense to replace the whole Django WSGI application with a custom one
-that later delegates to the Django one. For example, you could introduce WSGI
-middleware here, or combine a Django application with an application of another
-framework.
-
-"""
-import os
-
-# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
-# if running multiple sites in the same mod_wsgi process. To fix this, use
-# mod_wsgi daemon mode with each site in its own daemon process, or use
-# os.environ["DJANGO_SETTINGS_MODULE"] = "stats_app.settings"
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stats_app.settings")
-
-# This application object is used by any WSGI server configured to use this
-# file. This includes Django's development server, if the WSGI_APPLICATION
-# setting points here.
-from django.core.wsgi import get_wsgi_application
-application = get_wsgi_application()
-
-# Apply WSGI middleware here.
-# from helloworld.wsgi import HelloWorldApplication
-# application = HelloWorldApplication(application)
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_manipulator.py b/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_manipulator.py
deleted file mode 100644
index c6548510fdc40036978728a1c2ad06ff05a6b9e9..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_manipulator.py
+++ /dev/null
@@ -1,270 +0,0 @@
-import unittest
-import opentuner
-import mock
-import random
-import numpy
-from opentuner.search import manipulator
-
-def faked_random(nums):
-    f = fake_random(nums)
-    def inner(*args, **kwargs):
-        return f.next()
-    return inner
-
-def fake_random(nums):
-    i = 0
-    while True:
-        yield nums[i]
-        i = (i+1) % len(nums)
-
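-# Sketch of the fakes (values illustrative): faked_random([1, 6]) returns a
-# callable that ignores its arguments and cycles 1, 6, 1, 6, ...; it is used
-# as the side_effect for @mock.patch('random.randint', ...) in these tests.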
-
-class PermutationOperatorTests(unittest.TestCase):
-
-    def setUp(self):
-        """
-        Set up a few configurations. The values of the PermutationParameter are:
-        config1 - 0 1 2 3 4 5 6 7 8 9
-        config2 - 4 3 2 1 0 9 8 7 6 5
-        config3 - 1 0 4 2 7 9 5 3 6 8
-
-        """
-        self.manipulator = manipulator.ConfigurationManipulator()
-        self.param1 = manipulator.PermutationParameter("param1", [0,1,2,3,4,5,6,7,8,9])
-        self.manipulator.add_parameter(self.param1)
-
-        self.cfg = self.manipulator.seed_config()
-        self.config1 = self.manipulator.seed_config()
-        self.config2 = self.manipulator.seed_config()
-        self.config3 = self.manipulator.seed_config()
-
-        # repeating values
-        self.config4 = self.manipulator.seed_config()
-        self.config5 = self.manipulator.seed_config()
-
-
-        self.param1.set_value(self.config1, [0,1,2,3,4,5,6,7,8,9])
-        self.param1.set_value(self.config2, [4,3,2,1,0,9,8,7,6,5])
-        self.param1.set_value(self.config3, [1,0,4,2,7,9,5,3,6,8])
-
-        # repeating values
-        self.param1.set_value(self.config4, [1,2,3,4,2,3,4,3,4,4])
-        self.param1.set_value(self.config5, [4,2,4,3,3,1,3,4,2,4])
-
-    @mock.patch('random.randint', side_effect=faked_random([1,6]))
-    def test_op2_random_swap_1_6(self, randint_func):
-        # operator should swap the indices at 1 and 6
-        self.param1.op2_random_swap(self.cfg, self.config1)
-
-        self.assertEqual(self.param1.get_value(self.cfg),[0,6,2,3,4,5,1,7,8,9])
-        self.assertEqual(self.param1.get_value(self.config1),[0,1,2,3,4,5,6,7,8,9])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([7,2]))
-    def test_op2_random_invert(self, randint_func):
-        # should reverse the 3-element section starting at the index given by randint
-        self.param1.op2_random_invert(self.cfg, self.config1)
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,2,3,4,5,6,9,8,7])
-
-        self.param1.op2_random_invert(self.cfg, self.config1)
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,4,3,2,5,6,7,8,9])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([0]))
-    def test_op3_cross_PMX_str5(self, randint_func):
-        # should perform PMX with a cut at 0 and crossover size 5
-        self.param1.op3_cross(self.cfg, self.config1, self.config3,
-                                xchoice='op3_cross_PMX', strength=0.5)
-        self.assertEqual(self.param1.get_value(self.cfg),[1,0,4,2,7,5,6,3,8,9])
-
-    @mock.patch('random.randint', side_effect=faked_random([5]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_CX_no_cross(self, uniform_func, randint_func):
-        # should perform no cross
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.8)
-        self.assertEqual(self.param1.get_value(self.config1),[0,1,2,3,4,5,6,7,8,9])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([5]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_CX_cross_p1(self, uniform_func, randint_func):
-        # should cross the first parent
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.3, c1=0.5, c2="unused")
-        self.assertEqual(self.param1.get_value(self.config1),[0,1,2,3,4,9,6,7,8,5])
-
-    @mock.patch('random.randint', side_effect=faked_random([5]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_CX_cross_p2(self, uniform_func, randint_func):
-        # should cross the second parent
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.3, c1=0.3, c2="unused")
-        self.assertEqual(self.param1.get_value(self.config1),[0,1,2,3,4,9,5,7,6,8])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([5]))
-    def test_op3_cross_PX_5(self, randint_func):
-        # Random cut point = 5 (index = 4)
-        self.param1.op3_cross_PX(self.cfg, self.config1, self.config3, 2)
-        self.assertEqual(self.param1.get_value(self.cfg),[1,0,4,2,3,5,6,7,8,9])
-
-    @mock.patch('random.randint', side_effect=faked_random([2]))
-    def test_op3_cross_PMX_0_d4(self, randint_func):
-        # cut = 2, d = 4
-        self.param1.op3_cross_PMX(self.cfg, self.config2, self.config3, 4)
-        self.assertEqual(self.param1.get_value(self.cfg),[1,3,4,2,7,9,8,0,6,5])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([0]))
-    def test_op3_cross_PMX_0_d5(self, randint_func):
-        # cut = 0, d = 5
-        self.param1.op3_cross_PMX(self.cfg, self.config1, self.config3, 5)
-        self.assertEqual(self.param1.get_value(self.cfg),[1,0,4,2,7,5,6,3,8,9])
-
-    @mock.patch('random.randint', side_effect=faked_random([4]))
-    def test_op3_cross_PMX_dups(self, randint_func):
-        # cut = 4, d = 5
-        self.param1.op3_cross_PMX(self.cfg, self.config5, self.config4, 5)
-
-        # [4,2,4,3,3,1,3,4,2,4]
-        # [1,2,3,4,2,3,4,3,4,4]
-        # expected:
-        # [1,2,4,3,2,3,4,3,4,4]
-
-        self.assertEqual(self.param1.get_value(self.cfg), [1,2,4,3,2,3,4,3,4,4])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([5]))
-    def test_op3_cross_CX_5(self, randint_func):
-        # initial replacement at index 5
-        self.param1.op3_cross_CX(self.cfg, self.config1, self.config2, "unused")
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,2,3,4,9,6,7,8,5])
-        self.param1.op3_cross_CX(self.cfg, self.config1, self.config3, "unused")
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,2,3,4,9,5,7,6,8])
-
-    @mock.patch('random.randint', side_effect=faked_random([0]))
-    def test_op3_cross_CX_dups(self, randint_func):
-        # initial replacement at index 4
-        self.param1.op3_cross_CX(self.cfg, self.config5, self.config4, "unused")
-
-        # [4,2,4,3,3,1,3,4,2,4]
-        # [1,2,3,4,2,3,4,3,4,4]
-        # expected:
-        # [1,2,3,4,3,3,4,4,2,4]
-
-        self.assertEqual(self.param1.get_value(self.cfg), [1,2,3,4,3,3,4,4,2,4])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([3]))
-    def test_op3_cross_OX1_3_d4(self, randint_func):
-        # cut at 3
-        # d = 4
-        self.param1.op3_cross_OX1(self.cfg, self.config1, self.config2, 4)
-        self.assertEqual(self.param1.get_value(self.cfg),[2,3,4,1,0,9,8,5,6,7])
-        self.param1.op3_cross_OX1(self.cfg, self.config1, self.config3, 4)
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,3,2,7,9,5,4,6,8])
-
-    @mock.patch('random.randint', side_effect=faked_random([4,2]))
-    def test_op3_cross_OX3_2_5_d4(self, randint_func):
-        # cuts at 4,2
-        # d = 4
-        self.param1.op3_cross_OX3(self.cfg, self.config1, self.config2, 4)
-        self.assertEqual(self.param1.get_value(self.cfg),[3,4,5,6,2,1,0,9,7,8])
-        self.param1.op3_cross_OX3(self.cfg, self.config1, self.config3, 4)
-        self.assertEqual(self.param1.get_value(self.cfg),[0,1,3,5,4,2,7,9,6,8])
-
-
-class FloatArrayOperatorTests(unittest.TestCase):
-    """
-    also tests the operators for Array (since Array is abstract)
-    """
-
-    def setUp(self):
-        """
-        Set up a few configurations. The values of the FloatArray are:
-        config1 - 1.0 1.1 1.2 1.3 1.4 1.5 1.6 1.7 1.8 1.9
-        config2 - 2.0 2.1 2.2 2.3 2.4 2.5 2.6 2.7 2.8 2.9
-        config3 - 3.0 3.1 3.2 3.3 3.4 3.5 3.6 3.7 3.8 3.8 (the repeated 3.8
-        is intentional; test_op3_swarm_parallel depends on it)
-        """
-        self.manipulator = manipulator.ConfigurationManipulator()
-        self.param1 = manipulator.FloatArray("param1", 10, 4, 0)
-        self.manipulator.add_parameter(self.param1)
-
-        self.cfg = self.manipulator.seed_config()
-        self.config1 = self.manipulator.seed_config()
-        self.config2 = self.manipulator.seed_config()
-        self.config3 = self.manipulator.seed_config()
-
-        self.param1.set_value(self.config1, numpy.array([1.0,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9]))
-        self.param1.set_value(self.config2, numpy.array([2.0,2.1,2.2,2.3,2.4,2.5,2.6,2.7,2.8,2.9]))
-        self.param1.set_value(self.config3, numpy.array([3.0,3.1,3.2,3.3,3.4,3.5,3.6,3.7,3.8,3.8]))
-
-
-    @mock.patch('random.randint', side_effect=faked_random([3]))
-    def test_op3_cross_3_str4(self, randint_func):
-        self.param1.op3_cross(self.cfg, self.config1, self.config2, strength=0.4)
-
-        val = self.param1.get_value(self.cfg)
-        expected = [1.0,1.1,1.2,2.3,2.4,2.5,2.6,1.7,1.8,1.9]
-        for i in range(len(val)):
-            self.assertAlmostEqual(val[i], expected[i])
-
-    @mock.patch('random.randint', side_effect=faked_random([3]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_no_cross(self, uniform_func, randint_func):
-        #should perform no cross
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.8)
-        val = self.param1.get_value(self.config1)
-        expected = [1.0,1.1,1.2,1.3,1.4,1.5,1.6,1.7,1.8,1.9]
-        for i in range(len(val)):
-            self.assertAlmostEqual(val[i], expected[i])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([3]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_cross_p1(self, uniform_func, randint_func):
-        #should cross the first parent
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.3, c1=0.5, c2="unused")
-        val = self.param1.get_value(self.config1)
-        expected = [1.0,1.1,1.2,2.3,2.4,2.5,1.6,1.7,1.8,1.9]
-        for i in range(len(val)):
-            self.assertAlmostEqual(val[i], expected[i])
-
-
-    @mock.patch('random.randint', side_effect=faked_random([3]))
-    @mock.patch('random.uniform', side_effect=faked_random([0.4]))
-    def test_op3_swarm_cross_p2(self, uniform_func, randint_func):
-        #should cross the second parent
-        self.param1.op3_swarm(self.config1, self.config2, self.config3,
-                                xchoice='op3_cross_CX', c=0.3, c1=0.3, c2="unused")
-        val = self.param1.get_value(self.config1)
-        expected = [1.0,1.1,1.2,3.3,3.4,3.5,1.6,1.7,1.8,1.9]
-        self.assertEqual(len(val),len(expected))
-        for i in range(len(val)):
-            self.assertAlmostEqual(val[i], expected[i])
-
-    @mock.patch('random.random', side_effect=faked_random([0.2, 0.4]))
-    def test_op3_swarm_parallel(self, random_func):
-        # r1 = 0.2, r2 = 0.4, velocities = [-2,0,0,0,0,0,1,1.5,2,3]
-        # max and min are 4, 0
-        velocities = numpy.array([-2.0,0.0,0,0,0,0,1.0,1.5,2,3.0])
-
-        vs = self.param1.op3_swarm_parallel(self.config1, self.config2, self.config3, velocities=velocities)
-        vs_expected = [-1.5,.5,.5,.5,.5,.5,1.5,2.0,2.5,3.48]
-
-        self.assertEqual(len(vs),len(vs_expected))
-
-        for i in range(len(vs)):
-            self.assertAlmostEqual(vs[i], vs_expected[i])
-
-
-        val = self.param1.get_value(self.config1)
-        expected = [0,1.6,1.7,1.8,1.9,2.0,3.1,3.7,4,4]
-        self.assertEqual(len(val),len(expected))
-        for i in range(len(val)):
-            self.assertAlmostEqual(val[i], expected[i])
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_technique.py b/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_technique.py
deleted file mode 100644
index c6107bace942a5ac85533878131fb953439ea3f7..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/tests/test_technique.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import unittest
-import opentuner
-import mock
-from opentuner.search.composableevolutionarytechniques import ComposableEvolutionaryTechnique
-from opentuner.search import manipulator
-
-def faked_random(nums):
-  f = fake_random(nums)
-  def inner(*args, **kwargs):
-    return f.next()
-  return inner
-
-def fake_random(nums):
-  i = 0
-  while True:
-    yield nums[i]
-    i = (i+1) % len(nums)
-
-class EmptyComposableEvolutionaryTechnique(ComposableEvolutionaryTechnique):
-  def __init__(self, *pargs, **kwargs):
-    super(EmptyComposableEvolutionaryTechnique, self).__init__(*pargs, **kwargs)
-
-  def minimum_number_of_parents(self):
-    return 4
-
-  def get_parents(self, population):
-    cfg = self.manipulator.copy(population[0].config)
-
-    return [cfg]
-
-  def update_population(self, config, population):
-    # replace the oldest configuration if the new one is better.
-    population[0].config = config
-
-    return population
-
-class ComposableSearchTechniqueTests(unittest.TestCase):
-
-  def setUp(self):
-    self.operator_map = {}
-    ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
-                                  manipulator.PermutationParameter,
-                                  "op3_cross", xchoice='op3_cross_CX')
-    ComposableEvolutionaryTechnique.add_to_map(self.operator_map,
-                                  "FloatArray",
-                                  "op3_cross", strength=0.4)
-    self.technique = EmptyComposableEvolutionaryTechnique(operator_map = self.operator_map)
-
-  def test_add_to_map(self):
-    op_map = {}
-    op_map[manipulator.PermutationParameter] = {'op_name': 'op3_cross',
-                                                'args': (),
-                                                'kwargs': {'xchoice': 'op3_cross_CX'}}
-    op_map[manipulator.FloatArray] = {'op_name': 'op3_cross',
-                                        'args': (),
-                                        'kwargs': {'strength': 0.4}}
-    self.assertDictEqual(self.operator_map, op_map)
-
-  def test_get_default_operator(self):
-    default = self.technique.get_default_operator(manipulator.PermutationParameter)
-    self.assertDictEqual(default, {'op_name': 'op1_nop', 'args': [], 'kwargs': {}})
-
-
-  def test_get_operator(self):
-    default = self.technique.get_operator(manipulator.IntegerParameter)
-    self.assertDictEqual(default, {'op_name': 'op1_nop', 'args': [], 'kwargs': {}})
-
-    default = self.technique.get_operator(manipulator.PermutationParameter)
-    self.assertDictEqual(default, {'op_name': 'op3_cross','args': (),'kwargs': {'xchoice': 'op3_cross_CX'}})
-
-  @mock.patch('opentuner.search.manipulator.PermutationParameter.op3_cross')
-  def test_apply_operator(self, op3_cross_func):
-    param_instance = manipulator.PermutationParameter('temp', [1,2,3,4,5])
-    self.technique.apply_operator(param_instance, ['p1', 'p2', 'p3', 'p4'])
-    op3_cross_func.assert_called_once_with('p1', 'p2', 'p3', xchoice='op3_cross_CX')
-
-#TODO tests for RandomThreeParentsComposableTechnique
diff --git a/llvm/projects/hpvm-tensor-rt/opentuner/venv-bootstrap.py b/llvm/projects/hpvm-tensor-rt/opentuner/venv-bootstrap.py
deleted file mode 100755
index 6d6ad0113b72ffe5610b28ac2717442cba6eff8c..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/opentuner/venv-bootstrap.py
+++ /dev/null
@@ -1,2611 +0,0 @@
-#!/usr/bin/env python
-## WARNING: This file is generated
-#!/usr/bin/env python
-"""Create a "virtual" Python installation
-"""
-
-# If you change the version here, change it in setup.py
-# and docs/conf.py as well.
-__version__ = "1.9.1"  # following best practices
-virtualenv_version = __version__  # legacy, again
-
-import base64
-import sys
-import os
-import codecs
-import optparse
-import re
-import shutil
-import logging
-import tempfile
-import zlib
-import errno
-import glob
-import distutils.sysconfig
-from distutils.util import strtobool
-import struct
-import subprocess
-
-if sys.version_info < (2, 6):
-    print('ERROR: %s' % sys.exc_info()[1])
-    print('ERROR: this script requires Python 2.6 or greater.')
-    sys.exit(101)
-
-try:
-    set
-except NameError:
-    from sets import Set as set
-try:
-    basestring
-except NameError:
-    basestring = str
-
-try:
-    import ConfigParser
-except ImportError:
-    import configparser as ConfigParser
-
-join = os.path.join
-py_version = 'python%s.%s' % (sys.version_info[0], sys.version_info[1])
-
-is_jython = sys.platform.startswith('java')
-is_pypy = hasattr(sys, 'pypy_version_info')
-is_win = (sys.platform == 'win32')
-is_cygwin = (sys.platform == 'cygwin')
-is_darwin = (sys.platform == 'darwin')
-abiflags = getattr(sys, 'abiflags', '')
-
-user_dir = os.path.expanduser('~')
-if is_win:
-    default_storage_dir = os.path.join(user_dir, 'virtualenv')
-else:
-    default_storage_dir = os.path.join(user_dir, '.virtualenv')
-default_config_file = os.path.join(default_storage_dir, 'virtualenv.ini')
-
-if is_pypy:
-    expected_exe = 'pypy'
-elif is_jython:
-    expected_exe = 'jython'
-else:
-    expected_exe = 'python'
-
-
-REQUIRED_MODULES = ['os', 'posix', 'posixpath', 'nt', 'ntpath', 'genericpath',
-                    'fnmatch', 'locale', 'encodings', 'codecs',
-                    'stat', 'UserDict', 'readline', 'copy_reg', 'types',
-                    're', 'sre', 'sre_parse', 'sre_constants', 'sre_compile',
-                    'zlib']
-
-REQUIRED_FILES = ['lib-dynload', 'config']
-
-majver, minver = sys.version_info[:2]
-if majver == 2:
-    if minver >= 6:
-        REQUIRED_MODULES.extend(['warnings', 'linecache', '_abcoll', 'abc'])
-    if minver >= 7:
-        REQUIRED_MODULES.extend(['_weakrefset'])
-    if minver <= 3:
-        REQUIRED_MODULES.extend(['sets', '__future__'])
-elif majver == 3:
-    # Some extra modules are needed for Python 3, but different ones
-    # for different versions.
-    REQUIRED_MODULES.extend(['_abcoll', 'warnings', 'linecache', 'abc', 'io',
-                             '_weakrefset', 'copyreg', 'tempfile', 'random',
-                             '__future__', 'collections', 'keyword', 'tarfile',
-                             'shutil', 'struct', 'copy', 'tokenize', 'token',
-                             'functools', 'heapq', 'bisect', 'weakref',
-                             'reprlib'])
-    if minver >= 2:
-        REQUIRED_FILES[-1] = 'config-%s' % majver
-    if minver == 3:
-        import sysconfig
-        platdir = sysconfig.get_config_var('PLATDIR')
-        REQUIRED_FILES.append(platdir)
-        # The whole list of 3.3 modules is reproduced below - the current
-        # uncommented ones are required for 3.3 as of now, but more may be
-        # added as 3.3 development continues.
-        REQUIRED_MODULES.extend([
-            #"aifc",
-            #"antigravity",
-            #"argparse",
-            #"ast",
-            #"asynchat",
-            #"asyncore",
-            "base64",
-            #"bdb",
-            #"binhex",
-            #"bisect",
-            #"calendar",
-            #"cgi",
-            #"cgitb",
-            #"chunk",
-            #"cmd",
-            #"codeop",
-            #"code",
-            #"colorsys",
-            #"_compat_pickle",
-            #"compileall",
-            #"concurrent",
-            #"configparser",
-            #"contextlib",
-            #"cProfile",
-            #"crypt",
-            #"csv",
-            #"ctypes",
-            #"curses",
-            #"datetime",
-            #"dbm",
-            #"decimal",
-            #"difflib",
-            #"dis",
-            #"doctest",
-            #"dummy_threading",
-            "_dummy_thread",
-            #"email",
-            #"filecmp",
-            #"fileinput",
-            #"formatter",
-            #"fractions",
-            #"ftplib",
-            #"functools",
-            #"getopt",
-            #"getpass",
-            #"gettext",
-            #"glob",
-            #"gzip",
-            "hashlib",
-            #"heapq",
-            "hmac",
-            #"html",
-            #"http",
-            #"idlelib",
-            #"imaplib",
-            #"imghdr",
-            "imp",
-            "importlib",
-            #"inspect",
-            #"json",
-            #"lib2to3",
-            #"logging",
-            #"macpath",
-            #"macurl2path",
-            #"mailbox",
-            #"mailcap",
-            #"_markupbase",
-            #"mimetypes",
-            #"modulefinder",
-            #"multiprocessing",
-            #"netrc",
-            #"nntplib",
-            #"nturl2path",
-            #"numbers",
-            #"opcode",
-            #"optparse",
-            #"os2emxpath",
-            #"pdb",
-            #"pickle",
-            #"pickletools",
-            #"pipes",
-            #"pkgutil",
-            #"platform",
-            #"plat-linux2",
-            #"plistlib",
-            #"poplib",
-            #"pprint",
-            #"profile",
-            #"pstats",
-            #"pty",
-            #"pyclbr",
-            #"py_compile",
-            #"pydoc_data",
-            #"pydoc",
-            #"_pyio",
-            #"queue",
-            #"quopri",
-            #"reprlib",
-            "rlcompleter",
-            #"runpy",
-            #"sched",
-            #"shelve",
-            #"shlex",
-            #"smtpd",
-            #"smtplib",
-            #"sndhdr",
-            #"socket",
-            #"socketserver",
-            #"sqlite3",
-            #"ssl",
-            #"stringprep",
-            #"string",
-            #"_strptime",
-            #"subprocess",
-            #"sunau",
-            #"symbol",
-            #"symtable",
-            #"sysconfig",
-            #"tabnanny",
-            #"telnetlib",
-            #"test",
-            #"textwrap",
-            #"this",
-            #"_threading_local",
-            #"threading",
-            #"timeit",
-            #"tkinter",
-            #"tokenize",
-            #"token",
-            #"traceback",
-            #"trace",
-            #"tty",
-            #"turtledemo",
-            #"turtle",
-            #"unittest",
-            #"urllib",
-            #"uuid",
-            #"uu",
-            #"wave",
-            #"weakref",
-            #"webbrowser",
-            #"wsgiref",
-            #"xdrlib",
-            #"xml",
-            #"xmlrpc",
-            #"zipfile",
-        ])
-
-if is_pypy:
-    # these are needed to correctly display the exceptions that may happen
-    # during the bootstrap
-    REQUIRED_MODULES.extend(['traceback', 'linecache'])
-
-class Logger(object):
-
-    """
-    Logging object for use in command-line scripts.  Allows ranges of
-    levels, to avoid some redundancy in displayed information.
-    """
-
-    DEBUG = logging.DEBUG
-    INFO = logging.INFO
-    NOTIFY = (logging.INFO+logging.WARN)/2
-    WARN = WARNING = logging.WARN
-    ERROR = logging.ERROR
-    FATAL = logging.FATAL
-
-    LEVELS = [DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
-
-    def __init__(self, consumers):
-        self.consumers = consumers
-        self.indent = 0
-        self.in_progress = None
-        self.in_progress_hanging = False
-
-    def debug(self, msg, *args, **kw):
-        self.log(self.DEBUG, msg, *args, **kw)
-    def info(self, msg, *args, **kw):
-        self.log(self.INFO, msg, *args, **kw)
-    def notify(self, msg, *args, **kw):
-        self.log(self.NOTIFY, msg, *args, **kw)
-    def warn(self, msg, *args, **kw):
-        self.log(self.WARN, msg, *args, **kw)
-    def error(self, msg, *args, **kw):
-        self.log(self.ERROR, msg, *args, **kw)
-    def fatal(self, msg, *args, **kw):
-        self.log(self.FATAL, msg, *args, **kw)
-    def log(self, level, msg, *args, **kw):
-        if args:
-            if kw:
-                raise TypeError(
-                    "You may give positional or keyword arguments, not both")
-        args = args or kw
-        rendered = None
-        for consumer_level, consumer in self.consumers:
-            if self.level_matches(level, consumer_level):
-                if (self.in_progress_hanging
-                    and consumer in (sys.stdout, sys.stderr)):
-                    self.in_progress_hanging = False
-                    sys.stdout.write('\n')
-                    sys.stdout.flush()
-                if rendered is None:
-                    if args:
-                        rendered = msg % args
-                    else:
-                        rendered = msg
-                    rendered = ' '*self.indent + rendered
-                if hasattr(consumer, 'write'):
-                    consumer.write(rendered+'\n')
-                else:
-                    consumer(rendered)
-
-    def start_progress(self, msg):
-        assert not self.in_progress, (
-            "Tried to start_progress(%r) while in_progress %r"
-            % (msg, self.in_progress))
-        if self.level_matches(self.NOTIFY, self._stdout_level()):
-            sys.stdout.write(msg)
-            sys.stdout.flush()
-            self.in_progress_hanging = True
-        else:
-            self.in_progress_hanging = False
-        self.in_progress = msg
-
-    def end_progress(self, msg='done.'):
-        assert self.in_progress, (
-            "Tried to end_progress without start_progress")
-        if self.stdout_level_matches(self.NOTIFY):
-            if not self.in_progress_hanging:
-                # Some message has been printed out since start_progress
-                sys.stdout.write('...' + self.in_progress + msg + '\n')
-                sys.stdout.flush()
-            else:
-                sys.stdout.write(msg + '\n')
-                sys.stdout.flush()
-        self.in_progress = None
-        self.in_progress_hanging = False
-
-    def show_progress(self):
-        """If we are in a progress scope, and no log messages have been
-        shown, write out another '.'"""
-        if self.in_progress_hanging:
-            sys.stdout.write('.')
-            sys.stdout.flush()
-
-    def stdout_level_matches(self, level):
-        """Returns true if a message at this level will go to stdout"""
-        return self.level_matches(level, self._stdout_level())
-
-    def _stdout_level(self):
-        """Returns the level that stdout runs at"""
-        for level, consumer in self.consumers:
-            if consumer is sys.stdout:
-                return level
-        return self.FATAL
-
-    def level_matches(self, level, consumer_level):
-        """
-        >>> l = Logger([])
-        >>> l.level_matches(3, 4)
-        False
-        >>> l.level_matches(3, 2)
-        True
-        >>> l.level_matches(slice(None, 3), 3)
-        False
-        >>> l.level_matches(slice(None, 3), 2)
-        True
-        >>> l.level_matches(slice(1, 3), 1)
-        True
-        >>> l.level_matches(slice(2, 3), 1)
-        False
-        """
-        if isinstance(level, slice):
-            start, stop = level.start, level.stop
-            if start is not None and start > consumer_level:
-                return False
-            if stop is not None and stop <= consumer_level:
-                return False
-            return True
-        else:
-            return level >= consumer_level
-
-    #@classmethod
-    def level_for_integer(cls, level):
-        levels = cls.LEVELS
-        if level < 0:
-            return levels[0]
-        if level >= len(levels):
-            return levels[-1]
-        return levels[level]
-
-    level_for_integer = classmethod(level_for_integer)
-
-# create a silent logger just to prevent this from being undefined
-# will be overridden with the requested verbosity when main() is called.
-logger = Logger([(Logger.LEVELS[-1], sys.stdout)])
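-# Illustrative sketch (not part of the bootstrap flow; names are examples):
-# each consumer is a (level, stream_or_callable) pair, and a rendered message
-# is fanned out to every consumer whose threshold it meets, e.g.:
-#
-#     log = Logger([(Logger.INFO, sys.stdout)])
-#     log.notify('shown')   # NOTIFY >= INFO, so this reaches stdout
-#     log.debug('hidden')   # DEBUG < INFO, so this consumer drops it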
-
-def mkdir(path):
-    if not os.path.exists(path):
-        logger.info('Creating %s', path)
-        os.makedirs(path)
-    else:
-        logger.info('Directory %s already exists', path)
-
-def copyfileordir(src, dest):
-    if os.path.isdir(src):
-        shutil.copytree(src, dest, True)
-    else:
-        shutil.copy2(src, dest)
-
-def copyfile(src, dest, symlink=True):
-    if not os.path.exists(src):
-        # Some bad symlink in the src
-        logger.warn('Cannot find file %s (bad symlink)', src)
-        return
-    if os.path.exists(dest):
-        logger.debug('File %s already exists', dest)
-        return
-    if not os.path.exists(os.path.dirname(dest)):
-        logger.info('Creating parent directories for %s' % os.path.dirname(dest))
-        os.makedirs(os.path.dirname(dest))
-    if not os.path.islink(src):
-        srcpath = os.path.abspath(src)
-    else:
-        srcpath = os.readlink(src)
-    if symlink and hasattr(os, 'symlink') and not is_win:
-        logger.info('Symlinking %s', dest)
-        try:
-            os.symlink(srcpath, dest)
-        except (OSError, NotImplementedError):
-            logger.info('Symlinking failed, copying to %s', dest)
-            copyfileordir(src, dest)
-    else:
-        logger.info('Copying to %s', dest)
-        copyfileordir(src, dest)
-
-def writefile(dest, content, overwrite=True):
-    if not os.path.exists(dest):
-        logger.info('Writing %s', dest)
-        f = open(dest, 'wb')
-        f.write(content.encode('utf-8'))
-        f.close()
-        return
-    else:
-        f = open(dest, 'rb')
-        c = f.read()
-        f.close()
-        if c != content.encode("utf-8"):
-            if not overwrite:
-                logger.notify('File %s exists with different content; not overwriting', dest)
-                return
-            logger.notify('Overwriting %s with new content', dest)
-            f = open(dest, 'wb')
-            f.write(content.encode('utf-8'))
-            f.close()
-        else:
-            logger.info('Content %s already in place', dest)
-
-def rmtree(dir):
-    if os.path.exists(dir):
-        logger.notify('Deleting tree %s', dir)
-        shutil.rmtree(dir)
-    else:
-        logger.info('Do not need to delete %s; already gone', dir)
-
-def make_exe(fn):
-    if hasattr(os, 'chmod'):
-        oldmode = os.stat(fn).st_mode & 0xFFF # 0o7777
-        newmode = (oldmode | 0x16D) & 0xFFF # 0o555, 0o7777
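-        # Worked example of the bitmask above (illustrative): for a file with
-        # mode 0o644 (rw-r--r--), oldmode | 0x16D adds read/execute for all,
-        # since 0x16D == 0o555, giving 0o644 | 0o555 == 0o755 (rwxr-xr-x).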
-        os.chmod(fn, newmode)
-        logger.info('Changed mode of %s to %s', fn, oct(newmode))
-
-def _find_file(filename, dirs):
-    for dir in reversed(dirs):
-        files = glob.glob(os.path.join(dir, filename))
-        if files and os.path.isfile(files[0]):
-            return True, files[0]
-    return False, filename
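-# For example (filenames hypothetical): _find_file('distribute-*.tar.gz', dirs)
-# walks dirs in reverse order and returns (True, first_glob_hit) as soon as a
-# matching regular file is found, or (False, pattern) when nothing matches.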
-
-def _install_req(py_executable, unzip=False, distribute=False,
-                 search_dirs=None, never_download=False):
-
-    if search_dirs is None:
-        search_dirs = file_search_dirs()
-
-    if not distribute:
-        egg_path = 'setuptools-*-py%s.egg' % sys.version[:3]
-        found, egg_path = _find_file(egg_path, search_dirs)
-        project_name = 'setuptools'
-        bootstrap_script = EZ_SETUP_PY
-        tgz_path = None
-    else:
-        # Look for a distribute egg (these are not distributed by default,
-        # but can be made available by the user)
-        egg_path = 'distribute-*-py%s.egg' % sys.version[:3]
-        found, egg_path = _find_file(egg_path, search_dirs)
-        project_name = 'distribute'
-        if found:
-            tgz_path = None
-            bootstrap_script = DISTRIBUTE_FROM_EGG_PY
-        else:
-            # Fall back to sdist
-            # NB: egg_path is not None iff tgz_path is None
-            # iff bootstrap_script is a generic setup script accepting
-            # the standard arguments.
-            egg_path = None
-            tgz_path = 'distribute-*.tar.gz'
-            found, tgz_path = _find_file(tgz_path, search_dirs)
-            bootstrap_script = DISTRIBUTE_SETUP_PY
-
-    if is_jython and os._name == 'nt':
-        # Jython's .bat sys.executable can't handle a command line
-        # argument with newlines
-        fd, ez_setup = tempfile.mkstemp('.py')
-        os.write(fd, bootstrap_script)
-        os.close(fd)
-        cmd = [py_executable, ez_setup]
-    else:
-        cmd = [py_executable, '-c', bootstrap_script]
-    if unzip and egg_path:
-        cmd.append('--always-unzip')
-    env = {}
-    remove_from_env = ['__PYVENV_LAUNCHER__']
-    if logger.stdout_level_matches(logger.DEBUG) and egg_path:
-        cmd.append('-v')
-
-    old_chdir = os.getcwd()
-    if egg_path is not None and os.path.exists(egg_path):
-        logger.info('Using existing %s egg: %s' % (project_name, egg_path))
-        cmd.append(egg_path)
-        if os.environ.get('PYTHONPATH'):
-            env['PYTHONPATH'] = egg_path + os.path.pathsep + os.environ['PYTHONPATH']
-        else:
-            env['PYTHONPATH'] = egg_path
-    elif tgz_path is not None and os.path.exists(tgz_path):
-        # Found a tgz source dist, let's chdir
-        logger.info('Using existing %s sdist: %s' % (project_name, tgz_path))
-        os.chdir(os.path.dirname(tgz_path))
-        # in this case, we want to be sure that PYTHONPATH is unset (not
-        # just empty, really unset), else CPython tries to import the
-        # site.py that is in virtualenv_support
-        remove_from_env.append('PYTHONPATH')
-    elif never_download:
-        logger.fatal("Can't find any local distributions of %s to install "
-                     "and --never-download is set.  Either re-run virtualenv "
-                     "without the --never-download option, or place a %s "
-                     "distribution (%s) in one of these "
-                     "locations: %r" % (project_name, project_name,
-                                        egg_path or tgz_path,
-                                        search_dirs))
-        sys.exit(1)
-    elif egg_path:
-        logger.info('No %s egg found; downloading' % project_name)
-        cmd.extend(['--always-copy', '-U', project_name])
-    else:
-        logger.info('No %s tgz found; downloading' % project_name)
-    logger.start_progress('Installing %s...' % project_name)
-    logger.indent += 2
-    cwd = None
-    if project_name == 'distribute':
-        env['DONT_PATCH_SETUPTOOLS'] = 'true'
-
-    def _filter_ez_setup(line):
-        return filter_ez_setup(line, project_name)
-
-    if not os.access(os.getcwd(), os.W_OK):
-        cwd = tempfile.mkdtemp()
-        if tgz_path is not None and os.path.exists(tgz_path):
-            # the current working dir is hostile, let's copy the
-            # tarball to a temp dir
-            target = os.path.join(cwd, os.path.split(tgz_path)[-1])
-            shutil.copy(tgz_path, target)
-    try:
-        call_subprocess(cmd, show_stdout=False,
-                        filter_stdout=_filter_ez_setup,
-                        extra_env=env,
-                        remove_from_env=remove_from_env,
-                        cwd=cwd)
-    finally:
-        logger.indent -= 2
-        logger.end_progress()
-        if cwd is not None:
-            shutil.rmtree(cwd)
-        if os.getcwd() != old_chdir:
-            os.chdir(old_chdir)
-        if is_jython and os._name == 'nt':
-            os.remove(ez_setup)
-
-def file_search_dirs():
-    here = os.path.dirname(os.path.abspath(__file__))
-    dirs = ['.', here,
-            join(here, 'virtualenv_support')]
-    if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
-        # Probably some boot script; just in case virtualenv is installed...
-        try:
-            import virtualenv
-        except ImportError:
-            pass
-        else:
-            dirs.append(os.path.join(os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
-    return [d for d in dirs if os.path.isdir(d)]
-
-def install_setuptools(py_executable, unzip=False,
-                       search_dirs=None, never_download=False):
-    _install_req(py_executable, unzip,
-                 search_dirs=search_dirs, never_download=never_download)
-
-def install_distribute(py_executable, unzip=False,
-                       search_dirs=None, never_download=False):
-    _install_req(py_executable, unzip, distribute=True,
-                 search_dirs=search_dirs, never_download=never_download)
-
-_pip_re = re.compile(r'^pip-.*(zip|tar.gz|tar.bz2|tgz|tbz)$', re.I)
-def install_pip(py_executable, search_dirs=None, never_download=False):
-    if search_dirs is None:
-        search_dirs = file_search_dirs()
-
-    filenames = []
-    for dir in search_dirs:
-        filenames.extend([join(dir, fn) for fn in os.listdir(dir)
-                          if _pip_re.search(fn)])
-    filenames = [(os.path.basename(filename).lower(), i, filename) for i, filename in enumerate(filenames)]
-    filenames.sort()
-    filenames = [filename for basename, i, filename in filenames]
-    if not filenames:
-        filename = 'pip'
-    else:
-        filename = filenames[-1]
-    easy_install_script = 'easy_install'
-    if is_win:
-        easy_install_script = 'easy_install-script.py'
-    # There are two subtle issues here when invoking easy_install.
-    # 1. On unix-like systems the easy_install script can *only* be executed
-    #    directly if its full filesystem path is no longer than 78 characters.
-    # 2. A workaround for [1] is to use the `python path/to/easy_install foo`
-    #    pattern, but that breaks if the path contains non-ASCII characters, as
-    #    you can't put the file encoding declaration before the shebang line.
-    # The solution is to use Python's -x flag to skip the first line of the
-    # script (and any ASCII decoding errors that may have occurred in that line).
-    cmd = [py_executable, '-x', join(os.path.dirname(py_executable), easy_install_script), filename]
-    # jython and pypy don't yet support -x
-    if is_jython or is_pypy:
-        cmd.remove('-x')
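-    # For illustration (paths and filename hypothetical), the resulting
-    # command on a unix-like CPython looks like:
-    #     /env/bin/python -x /env/bin/easy_install pip-1.2.tar.gz
-    # where -x tells the interpreter to skip the script's shebang line.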
-    if filename == 'pip':
-        if never_download:
-            logger.fatal("Can't find any local distributions of pip to install "
-                         "and --never-download is set.  Either re-run virtualenv "
-                         "without the --never-download option, or place a pip "
-                         "source distribution (zip/tar.gz/tar.bz2) in one of these "
-                         "locations: %r" % search_dirs)
-            sys.exit(1)
-        logger.info('Installing pip from network...')
-    else:
-        logger.info('Installing existing %s distribution: %s' % (
-                os.path.basename(filename), filename))
-    logger.start_progress('Installing pip...')
-    logger.indent += 2
-    def _filter_setup(line):
-        return filter_ez_setup(line, 'pip')
-    try:
-        call_subprocess(cmd, show_stdout=False,
-                        filter_stdout=_filter_setup)
-    finally:
-        logger.indent -= 2
-        logger.end_progress()
-
-def filter_ez_setup(line, project_name='setuptools'):
-    if not line.strip():
-        return Logger.DEBUG
-    if project_name == 'distribute':
-        for prefix in ('Extracting', 'Now working', 'Installing', 'Before',
-                       'Scanning', 'Setuptools', 'Egg', 'Already',
-                       'running', 'writing', 'reading', 'installing',
-                       'creating', 'copying', 'byte-compiling', 'removing',
-                       'Processing'):
-            if line.startswith(prefix):
-                return Logger.DEBUG
-        return Logger.DEBUG
-    for prefix in ['Reading ', 'Best match', 'Processing setuptools',
-                   'Copying setuptools', 'Adding setuptools',
-                   'Installing ', 'Installed ']:
-        if line.startswith(prefix):
-            return Logger.DEBUG
-    return Logger.INFO
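-# For example, a setup output line such as 'Installing pip script to ...' is
-# demoted to Logger.DEBUG by the prefix match above, while an unmatched line
-# like 'Downloading ...' is surfaced at Logger.INFO.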
-
-
-class UpdatingDefaultsHelpFormatter(optparse.IndentedHelpFormatter):
-    """
-    Custom help formatter for use in ConfigOptionParser that updates
-    the defaults before expanding them, allowing them to show up correctly
-    in the help listing
-    """
-    def expand_default(self, option):
-        if self.parser is not None:
-            self.parser.update_defaults(self.parser.defaults)
-        return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class ConfigOptionParser(optparse.OptionParser):
-    """
-    Custom option parser which updates its defaults by checking the
-    configuration files and environment variables
-    """
-    def __init__(self, *args, **kwargs):
-        self.config = ConfigParser.RawConfigParser()
-        self.files = self.get_config_files()
-        self.config.read(self.files)
-        optparse.OptionParser.__init__(self, *args, **kwargs)
-
-    def get_config_files(self):
-        config_file = os.environ.get('VIRTUALENV_CONFIG_FILE', False)
-        if config_file and os.path.exists(config_file):
-            return [config_file]
-        return [default_config_file]
-
-    def update_defaults(self, defaults):
-        """
-        Updates the given defaults with values from the config files and
-        the environ. Does a little special handling for certain types of
-        options (lists).
-        """
-        # Then go and look for the other sources of configuration:
-        config = {}
-        # 1. config files
-        config.update(dict(self.get_config_section('virtualenv')))
-        # 2. environmental variables
-        config.update(dict(self.get_environ_vars()))
-        # Then set the options with those values
-        for key, val in config.items():
-            key = key.replace('_', '-')
-            if not key.startswith('--'):
-                key = '--%s' % key  # only prefer long opts
-            option = self.get_option(key)
-            if option is not None:
-                # ignore empty values
-                if not val:
-                    continue
-                # handle multiline configs
-                if option.action == 'append':
-                    val = val.split()
-                else:
-                    option.nargs = 1
-                if option.action == 'store_false':
-                    val = not strtobool(val)
-                elif option.action in ('store_true', 'count'):
-                    val = strtobool(val)
-                try:
-                    val = option.convert_value(key, val)
-                except optparse.OptionValueError:
-                    e = sys.exc_info()[1]
-                    print("An error occured during configuration: %s" % e)
-                    sys.exit(3)
-                defaults[option.dest] = val
-        return defaults
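-    # Illustrative mapping (values hypothetical): the environment variable
-    # VIRTUALENV_NEVER_DOWNLOAD=1 and the config file entry
-    #
-    #     [virtualenv]
-    #     never-download = 1
-    #
-    # both resolve to the --never-download option; since its action is
-    # 'store_true', the raw string is coerced with strtobool() first.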
-
-    def get_config_section(self, name):
-        """
-        Get a section of a configuration
-        """
-        if self.config.has_section(name):
-            return self.config.items(name)
-        return []
-
-    def get_environ_vars(self, prefix='VIRTUALENV_'):
-        """
-        Yields all environment variables with the given prefix (default
-        VIRTUALENV_), with the prefix stripped and the name lowercased
-        """
-        for key, val in os.environ.items():
-            if key.startswith(prefix):
-                yield (key.replace(prefix, '').lower(), val)
-
-    def get_default_values(self):
-        """
-        Overriding to make updating the defaults after instantiation of
-        the option parser possible; update_defaults() does the dirty work.
-        """
-        if not self.process_default_values:
-            # Old, pre-Optik 1.5 behaviour.
-            return optparse.Values(self.defaults)
-
-        defaults = self.update_defaults(self.defaults.copy())  # ours
-        for option in self._get_all_options():
-            default = defaults.get(option.dest)
-            if isinstance(default, basestring):
-                opt_str = option.get_opt_string()
-                defaults[option.dest] = option.check_value(opt_str, default)
-        return optparse.Values(defaults)
-
-
-def main():
-    parser = ConfigOptionParser(
-        version=virtualenv_version,
-        usage="%prog [OPTIONS] DEST_DIR",
-        formatter=UpdatingDefaultsHelpFormatter())
-
-    parser.add_option(
-        '-v', '--verbose',
-        action='count',
-        dest='verbose',
-        default=0,
-        help="Increase verbosity")
-
-    parser.add_option(
-        '-q', '--quiet',
-        action='count',
-        dest='quiet',
-        default=0,
-        help='Decrease verbosity')
-
-    parser.add_option(
-        '-p', '--python',
-        dest='python',
-        metavar='PYTHON_EXE',
-        help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 '
-        'interpreter to create the new environment.  The default is the interpreter that '
-        'virtualenv was installed with (%s)' % sys.executable)
-
-    parser.add_option(
-        '--clear',
-        dest='clear',
-        action='store_true',
-        help="Clear out the non-root install and start from scratch")
-
-    parser.set_defaults(system_site_packages=False)
-    parser.add_option(
-        '--no-site-packages',
-        dest='system_site_packages',
-        action='store_false',
-        help="Don't give access to the global site-packages dir to the "
-             "virtual environment (default)")
-
-    parser.add_option(
-        '--system-site-packages',
-        dest='system_site_packages',
-        action='store_true',
-        help="Give access to the global site-packages dir to the "
-             "virtual environment")
-
-    parser.add_option(
-        '--unzip-setuptools',
-        dest='unzip_setuptools',
-        action='store_true',
-        help="Unzip Setuptools or Distribute when installing it")
-
-    parser.add_option(
-        '--relocatable',
-        dest='relocatable',
-        action='store_true',
-        help='Make an EXISTING virtualenv environment relocatable.  '
-        'This fixes up scripts and makes all .pth files relative')
-
-    parser.add_option(
-        '--distribute', '--use-distribute',  # the second option is for legacy reasons here. Hi Kenneth!
-        dest='use_distribute',
-        action='store_true',
-        help='Use Distribute instead of Setuptools. Set environ variable '
-        'VIRTUALENV_DISTRIBUTE to make it the default ')
-
-    parser.add_option(
-        '--no-setuptools',
-        dest='no_setuptools',
-        action='store_true',
-        help='Do not install distribute/setuptools (or pip) '
-        'in the new virtualenv.')
-
-    parser.add_option(
-        '--no-pip',
-        dest='no_pip',
-        action='store_true',
-        help='Do not install pip in the new virtualenv.')
-
-    parser.add_option(
-        '--setuptools',
-        dest='use_distribute',
-        action='store_false',
-        help='Use Setuptools instead of Distribute.  Set environ variable '
-        'VIRTUALENV_SETUPTOOLS to make it the default ')
-
-    # Set this to True to use distribute by default, even in Python 2.
-    parser.set_defaults(use_distribute=False)
-
-    default_search_dirs = file_search_dirs()
-    parser.add_option(
-        '--extra-search-dir',
-        dest="search_dirs",
-        action="append",
-        default=default_search_dirs,
-        help="Directory to look for setuptools/distribute/pip distributions in. "
-        "You can add any number of additional --extra-search-dir paths.")
-
-    parser.add_option(
-        '--never-download',
-        dest="never_download",
-        action="store_true",
-        help="Never download anything from the network.  Instead, virtualenv will fail "
-        "if local distributions of setuptools/distribute/pip are not present.")
-
-    parser.add_option(
-        '--prompt',
-        dest='prompt',
-        help='Provides an alternative prompt prefix for this environment')
-
-    if 'extend_parser' in globals():
-        extend_parser(parser)
-
-    options, args = parser.parse_args()
-
-    global logger
-
-    if 'adjust_options' in globals():
-        adjust_options(options, args)
-
-    verbosity = options.verbose - options.quiet
-    logger = Logger([(Logger.level_for_integer(2 - verbosity), sys.stdout)])
-
-    if options.python and not os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
-        env = os.environ.copy()
-        interpreter = resolve_interpreter(options.python)
-        if interpreter == sys.executable:
-            logger.warn('Already using interpreter %s' % interpreter)
-        else:
-            logger.notify('Running virtualenv with interpreter %s' % interpreter)
-            env['VIRTUALENV_INTERPRETER_RUNNING'] = 'true'
-            file = __file__
-            if file.endswith('.pyc'):
-                file = file[:-1]
-            popen = subprocess.Popen([interpreter, file] + sys.argv[1:], env=env)
-            raise SystemExit(popen.wait())
-
-    # Force --distribute on Python 3, since setuptools is not available.
-    if majver > 2:
-        options.use_distribute = True
-
-    if os.environ.get('PYTHONDONTWRITEBYTECODE') and not options.use_distribute:
-        print(
-            "The PYTHONDONTWRITEBYTECODE environment variable is "
-            "not compatible with setuptools. Either use --distribute "
-            "or unset PYTHONDONTWRITEBYTECODE.")
-        sys.exit(2)
-    if not args:
-        print('You must provide a DEST_DIR')
-        parser.print_help()
-        sys.exit(2)
-    if len(args) > 1:
-        print('There must be only one argument: DEST_DIR (you gave %s)' % (
-            ' '.join(args)))
-        parser.print_help()
-        sys.exit(2)
-
-    home_dir = args[0]
-
-    if os.environ.get('WORKING_ENV'):
-        logger.fatal('ERROR: you cannot run virtualenv while in a workingenv')
-        logger.fatal('Please deactivate your workingenv, then re-run this script')
-        sys.exit(3)
-
-    if 'PYTHONHOME' in os.environ:
-        logger.warn('PYTHONHOME is set.  You *must* activate the virtualenv before using it')
-        del os.environ['PYTHONHOME']
-
-    if options.relocatable:
-        make_environment_relocatable(home_dir)
-        return
-
-    create_environment(home_dir,
-                       site_packages=options.system_site_packages,
-                       clear=options.clear,
-                       unzip_setuptools=options.unzip_setuptools,
-                       use_distribute=options.use_distribute,
-                       prompt=options.prompt,
-                       search_dirs=options.search_dirs,
-                       never_download=options.never_download,
-                       no_setuptools=options.no_setuptools,
-                       no_pip=options.no_pip)
-    if 'after_install' in globals():
-        after_install(options, home_dir)
-
-def call_subprocess(cmd, show_stdout=True,
-                    filter_stdout=None, cwd=None,
-                    raise_on_returncode=True, extra_env=None,
-                    remove_from_env=None):
-    cmd_parts = []
-    for part in cmd:
-        if len(part) > 45:
-            part = part[:20]+"..."+part[-20:]
-        if ' ' in part or '\n' in part or '"' in part or "'" in part:
-            part = '"%s"' % part.replace('"', '\\"')
-        if hasattr(part, 'decode'):
-            try:
-                part = part.decode(sys.getdefaultencoding())
-            except UnicodeDecodeError:
-                part = part.decode(sys.getfilesystemencoding())
-        cmd_parts.append(part)
-    cmd_desc = ' '.join(cmd_parts)
-    if show_stdout:
-        stdout = None
-    else:
-        stdout = subprocess.PIPE
-    logger.debug("Running command %s" % cmd_desc)
-    if extra_env or remove_from_env:
-        env = os.environ.copy()
-        if extra_env:
-            env.update(extra_env)
-        if remove_from_env:
-            for varname in remove_from_env:
-                env.pop(varname, None)
-    else:
-        env = None
-    try:
-        proc = subprocess.Popen(
-            cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
-            cwd=cwd, env=env)
-    except Exception:
-        e = sys.exc_info()[1]
-        logger.fatal(
-            "Error %s while executing command %s" % (e, cmd_desc))
-        raise
-    all_output = []
-    if stdout is not None:
-        stdout = proc.stdout
-        encoding = sys.getdefaultencoding()
-        fs_encoding = sys.getfilesystemencoding()
-        while 1:
-            line = stdout.readline()
-            try:
-                line = line.decode(encoding)
-            except UnicodeDecodeError:
-                line = line.decode(fs_encoding)
-            if not line:
-                break
-            line = line.rstrip()
-            all_output.append(line)
-            if filter_stdout:
-                level = filter_stdout(line)
-                if isinstance(level, tuple):
-                    level, line = level
-                logger.log(level, line)
-                if not logger.stdout_level_matches(level):
-                    logger.show_progress()
-            else:
-                logger.info(line)
-    else:
-        proc.communicate()
-    proc.wait()
-    if proc.returncode:
-        if raise_on_returncode:
-            if all_output:
-                logger.notify('Complete output from command %s:' % cmd_desc)
-                logger.notify('\n'.join(all_output) + '\n----------------------------------------')
-            raise OSError(
-                "Command %s failed with error code %s"
-                % (cmd_desc, proc.returncode))
-        else:
-            logger.warn(
-                "Command %s had error code %s"
-                % (cmd_desc, proc.returncode))
-
-
-def create_environment(home_dir, site_packages=False, clear=False,
-                       unzip_setuptools=False, use_distribute=False,
-                       prompt=None, search_dirs=None, never_download=False,
-                       no_setuptools=False, no_pip=False):
-    """
-    Creates a new environment in ``home_dir``.
-
-    If ``site_packages`` is true, then the global ``site-packages/``
-    directory will be on the path.
-
-    If ``clear`` is true (default False) then the environment will
-    first be cleared.
-    """
-    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
-
-    py_executable = os.path.abspath(install_python(
-        home_dir, lib_dir, inc_dir, bin_dir,
-        site_packages=site_packages, clear=clear))
-
-    install_distutils(home_dir)
-
-    if not no_setuptools:
-        if use_distribute:
-            install_distribute(py_executable, unzip=unzip_setuptools,
-                               search_dirs=search_dirs, never_download=never_download)
-        else:
-            install_setuptools(py_executable, unzip=unzip_setuptools,
-                               search_dirs=search_dirs, never_download=never_download)
-
-        if not no_pip:
-            install_pip(py_executable, search_dirs=search_dirs, never_download=never_download)
-
-    install_activate(home_dir, bin_dir, prompt)
-
-def is_executable_file(fpath):
-    return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
-def path_locations(home_dir):
-    """Return the path locations for the environment (where libraries are,
-    where scripts go, etc)"""
-    # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its
-    # prefix arg is broken: http://bugs.python.org/issue3386
-    if is_win:
-        # Windows has lots of problems with executables with spaces in
-        # the name; this function will remove them (using the ~1
-        # format):
-        mkdir(home_dir)
-        if ' ' in home_dir:
-            import ctypes
-            GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW
-            size = max(len(home_dir)+1, 256)
-            buf = ctypes.create_unicode_buffer(size)
-            try:
-                u = unicode
-            except NameError:
-                u = str
-            ret = GetShortPathName(u(home_dir), buf, size)
-            if not ret:
-                print('Error: the path "%s" has a space in it' % home_dir)
-                print('We could not determine the short pathname for it.')
-                print('Exiting.')
-                sys.exit(3)
-            home_dir = str(buf.value)
-        lib_dir = join(home_dir, 'Lib')
-        inc_dir = join(home_dir, 'Include')
-        bin_dir = join(home_dir, 'Scripts')
-    if is_jython:
-        lib_dir = join(home_dir, 'Lib')
-        inc_dir = join(home_dir, 'Include')
-        bin_dir = join(home_dir, 'bin')
-    elif is_pypy:
-        lib_dir = home_dir
-        inc_dir = join(home_dir, 'include')
-        bin_dir = join(home_dir, 'bin')
-    elif not is_win:
-        lib_dir = join(home_dir, 'lib', py_version)
-        multiarch_exec = '/usr/bin/multiarch-platform'
-        if is_executable_file(multiarch_exec):
-            # In Mageia (2) and Mandriva distros the include dir must be like:
-            # virtualenv/include/multiarch-x86_64-linux/python2.7
-            # instead of being virtualenv/include/python2.7
-            p = subprocess.Popen(multiarch_exec, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            stdout, stderr = p.communicate()
-            # stdout.strip() is needed to remove the trailing newline character
-            inc_dir = join(home_dir, 'include', stdout.strip(), py_version + abiflags)
-        else:
-            inc_dir = join(home_dir, 'include', py_version + abiflags)
-        bin_dir = join(home_dir, 'bin')
-    return home_dir, lib_dir, inc_dir, bin_dir
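-# Illustrative result (hypothetical paths, assuming CPython 2.7 on Linux with
-# empty abiflags and no multiarch): path_locations('/home/me/env') returns
-# ('/home/me/env', '/home/me/env/lib/python2.7',
-#  '/home/me/env/include/python2.7', '/home/me/env/bin').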
-
-
-def change_prefix(filename, dst_prefix):
-    prefixes = [sys.prefix]
-
-    if is_darwin:
-        prefixes.extend((
-            os.path.join("/Library/Python", sys.version[:3], "site-packages"),
-            os.path.join(sys.prefix, "Extras", "lib", "python"),
-            os.path.join("~", "Library", "Python", sys.version[:3], "site-packages"),
-            # Python 2.6 no-frameworks
-            os.path.join("~", ".local", "lib","python", sys.version[:3], "site-packages"),
-            # System Python 2.7 on OSX Mountain Lion
-            os.path.join("~", "Library", "Python", sys.version[:3], "lib", "python", "site-packages")))
-
-    if hasattr(sys, 'real_prefix'):
-        prefixes.append(sys.real_prefix)
-    if hasattr(sys, 'base_prefix'):
-        prefixes.append(sys.base_prefix)
-    prefixes = list(map(os.path.expanduser, prefixes))
-    prefixes = list(map(os.path.abspath, prefixes))
-    # Check longer prefixes first so we don't split in the middle of a filename
-    prefixes = sorted(prefixes, key=len, reverse=True)
-    filename = os.path.abspath(filename)
-    for src_prefix in prefixes:
-        if filename.startswith(src_prefix):
-            _, relpath = filename.split(src_prefix, 1)
-            if src_prefix != os.sep: # sys.prefix == "/"
-                assert relpath[0] == os.sep
-                relpath = relpath[1:]
-            return join(dst_prefix, relpath)
-    assert False, "Filename %s does not start with any of these prefixes: %s" % \
-        (filename, prefixes)
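-# Worked example (paths hypothetical): with '/usr' among the prefixes,
-# change_prefix('/usr/lib/python2.7/os.py', '/home/me/env') strips the longest
-# matching prefix and returns '/home/me/env/lib/python2.7/os.py'.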
-
-def copy_required_modules(dst_prefix):
-    import imp
-    # If we are running under -p, we need to remove the current
-    # directory from sys.path temporarily here, so that we
-    # definitely get the modules from the site directory of
-    # the interpreter we are running under, not the one
-    # virtualenv.py is installed under (which might lead to py2/py3
-    # incompatibility issues)
-    _prev_sys_path = sys.path
-    if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'):
-        sys.path = sys.path[1:]
-    try:
-        for modname in REQUIRED_MODULES:
-            if modname in sys.builtin_module_names:
-                logger.info("Ignoring built-in bootstrap module: %s" % modname)
-                continue
-            try:
-                f, filename, _ = imp.find_module(modname)
-            except ImportError:
-                logger.info("Cannot import bootstrap module: %s" % modname)
-            else:
-                if f is not None:
-                    f.close()
-                # special-case custom readline.so on OS X, but not for pypy:
-                if modname == 'readline' and sys.platform == 'darwin' and not (
-                        is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))):
-                    dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so')
-                else:
-                    dst_filename = change_prefix(filename, dst_prefix)
-                copyfile(filename, dst_filename)
-                if filename.endswith('.pyc'):
-                    pyfile = filename[:-1]
-                    if os.path.exists(pyfile):
-                        copyfile(pyfile, dst_filename[:-1])
-    finally:
-        sys.path = _prev_sys_path
-
-
-def subst_path(prefix_path, prefix, home_dir):
-    prefix_path = os.path.normpath(prefix_path)
-    prefix = os.path.normpath(prefix)
-    home_dir = os.path.normpath(home_dir)
-    if not prefix_path.startswith(prefix):
-        logger.warn('Path not in prefix %r %r', prefix_path, prefix)
-        return
-    return prefix_path.replace(prefix, home_dir, 1)
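-# For example, subst_path('/usr/include/python2.7', '/usr', '/home/me/env')
-# returns '/home/me/env/include/python2.7'; a path outside the prefix logs a
-# warning and returns None.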
-
-
-def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear):
-    """Install just the base environment, no distutils patches etc"""
-    if sys.executable.startswith(bin_dir):
-        print('Please use the *system* python to run this script')
-        return
-
-    if clear:
-        rmtree(lib_dir)
-        ## FIXME: why not delete it?
-        ## Maybe it should delete everything with #!/path/to/venv/python in it
-        logger.notify('Not deleting %s', bin_dir)
-
-    if hasattr(sys, 'real_prefix'):
-        logger.notify('Using real prefix %r' % sys.real_prefix)
-        prefix = sys.real_prefix
-    elif hasattr(sys, 'base_prefix'):
-        logger.notify('Using base prefix %r' % sys.base_prefix)
-        prefix = sys.base_prefix
-    else:
-        prefix = sys.prefix
-    mkdir(lib_dir)
-    fix_lib64(lib_dir)
-    stdlib_dirs = [os.path.dirname(os.__file__)]
-    if is_win:
-        stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs'))
-    elif is_darwin:
-        stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages'))
-    if hasattr(os, 'symlink'):
-        logger.info('Symlinking Python bootstrap modules')
-    else:
-        logger.info('Copying Python bootstrap modules')
-    logger.indent += 2
-    try:
-        # copy required files...
-        for stdlib_dir in stdlib_dirs:
-            if not os.path.isdir(stdlib_dir):
-                continue
-            for fn in os.listdir(stdlib_dir):
-                bn = os.path.splitext(fn)[0]
-                if fn != 'site-packages' and bn in REQUIRED_FILES:
-                    copyfile(join(stdlib_dir, fn), join(lib_dir, fn))
-        # ...and modules
-        copy_required_modules(home_dir)
-    finally:
-        logger.indent -= 2
-    mkdir(join(lib_dir, 'site-packages'))
-    import site
-    site_filename = site.__file__
-    if site_filename.endswith('.pyc'):
-        site_filename = site_filename[:-1]
-    elif site_filename.endswith('$py.class'):
-        site_filename = site_filename.replace('$py.class', '.py')
-    site_filename_dst = change_prefix(site_filename, home_dir)
-    site_dir = os.path.dirname(site_filename_dst)
-    writefile(site_filename_dst, SITE_PY)
-    writefile(join(site_dir, 'orig-prefix.txt'), prefix)
-    site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
-    if not site_packages:
-        writefile(site_packages_filename, '')
-
-    if is_pypy or is_win:
-        stdinc_dir = join(prefix, 'include')
-    else:
-        stdinc_dir = join(prefix, 'include', py_version + abiflags)
-    if os.path.exists(stdinc_dir):
-        copyfile(stdinc_dir, inc_dir)
-    else:
-        logger.debug('No include dir %s' % stdinc_dir)
-
-    platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1)
-    if platinc_dir != stdinc_dir:
-        platinc_dest = distutils.sysconfig.get_python_inc(
-            plat_specific=1, prefix=home_dir)
-        if platinc_dir == platinc_dest:
-            # Do platinc_dest manually due to a CPython bug;
-            # not http://bugs.python.org/issue3386 but a close cousin
-            platinc_dest = subst_path(platinc_dir, prefix, home_dir)
-        if platinc_dest:
-            # PyPy's stdinc_dir and prefix are relative to the original binary
-            # (traversing virtualenvs), whereas the platinc_dir is relative to
-            # the inner virtualenv and ignores the prefix argument.
-            # This seems more evolved than designed.
-            copyfile(platinc_dir, platinc_dest)
-
-    # pypy never uses exec_prefix, just ignore it
-    if sys.exec_prefix != prefix and not is_pypy:
-        if is_win:
-            exec_dir = join(sys.exec_prefix, 'lib')
-        elif is_jython:
-            exec_dir = join(sys.exec_prefix, 'Lib')
-        else:
-            exec_dir = join(sys.exec_prefix, 'lib', py_version)
-        for fn in os.listdir(exec_dir):
-            copyfile(join(exec_dir, fn), join(lib_dir, fn))
-
-    if is_jython:
-        # Jython has either jython-dev.jar and javalib/ dir, or just
-        # jython.jar
-        for name in 'jython-dev.jar', 'javalib', 'jython.jar':
-            src = join(prefix, name)
-            if os.path.exists(src):
-                copyfile(src, join(home_dir, name))
-        # XXX: registry should always exist after Jython 2.5rc1
-        src = join(prefix, 'registry')
-        if os.path.exists(src):
-            copyfile(src, join(home_dir, 'registry'), symlink=False)
-        copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'),
-                 symlink=False)
-
-    mkdir(bin_dir)
-    py_executable = join(bin_dir, os.path.basename(sys.executable))
-    if 'Python.framework' in prefix:
-        # OS X framework builds cause validation to break
-        # https://github.com/pypa/virtualenv/issues/322
-        if os.environ.get('__PYVENV_LAUNCHER__'):
-            os.unsetenv('__PYVENV_LAUNCHER__')
-        if re.search(r'/Python(?:-32|-64)*$', py_executable):
-            # The name of the python executable is not quite what
-            # we want, rename it.
-            py_executable = os.path.join(
-                    os.path.dirname(py_executable), 'python')
-
-    logger.notify('New %s executable in %s', expected_exe, py_executable)
-    pcbuild_dir = os.path.dirname(sys.executable)
-    pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth')
-    if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')):
-        logger.notify('Detected python running from build directory %s', pcbuild_dir)
-        logger.notify('Writing .pth file linking to build directory for *.pyd files')
-        writefile(pyd_pth, pcbuild_dir)
-    else:
-        pcbuild_dir = None
-        if os.path.exists(pyd_pth):
-            logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth)
-            os.unlink(pyd_pth)
-
-    if sys.executable != py_executable:
-        ## FIXME: could I just hard link?
-        executable = sys.executable
-        shutil.copyfile(executable, py_executable)
-        make_exe(py_executable)
-        if is_win or is_cygwin:
-            pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe')
-            if os.path.exists(pythonw):
-                logger.info('Also created pythonw.exe')
-                shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe'))
-            python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe')
-            python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe')
-            if os.path.exists(python_d):
-                logger.info('Also created python_d.exe')
-                shutil.copyfile(python_d, python_d_dest)
-            elif os.path.exists(python_d_dest):
-                logger.info('Removed python_d.exe as it is no longer at the source')
-                os.unlink(python_d_dest)
-            # we need to copy the DLL to enforce that windows will load the correct one.
-            # may not exist if we are cygwin.
-            py_executable_dll = 'python%s%s.dll' % (
-                sys.version_info[0], sys.version_info[1])
-            py_executable_dll_d = 'python%s%s_d.dll' % (
-                sys.version_info[0], sys.version_info[1])
-            pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll)
-            pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d)
-            pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d)
-            if os.path.exists(pythondll):
-                logger.info('Also created %s' % py_executable_dll)
-                shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll))
-            if os.path.exists(pythondll_d):
-                logger.info('Also created %s' % py_executable_dll_d)
-                shutil.copyfile(pythondll_d, pythondll_d_dest)
-            elif os.path.exists(pythondll_d_dest):
-                logger.info('Removed %s as the source does not exist' % pythondll_d_dest)
-                os.unlink(pythondll_d_dest)
-        if is_pypy:
-            # make a symlink python --> pypy-c
-            python_executable = os.path.join(os.path.dirname(py_executable), 'python')
-            if sys.platform in ('win32', 'cygwin'):
-                python_executable += '.exe'
-            logger.info('Also created executable %s' % python_executable)
-            copyfile(py_executable, python_executable)
-
-            if is_win:
-                for name in 'libexpat.dll', 'libpypy.dll', 'libpypy-c.dll', 'libeay32.dll', 'ssleay32.dll', 'sqlite.dll':
-                    src = join(prefix, name)
-                    if os.path.exists(src):
-                        copyfile(src, join(bin_dir, name))
-
-    if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe:
-        secondary_exe = os.path.join(os.path.dirname(py_executable),
-                                     expected_exe)
-        py_executable_ext = os.path.splitext(py_executable)[1]
-        if py_executable_ext == '.exe':
-            # python2.4 gives an extension of '.4' :P
-            secondary_exe += py_executable_ext
-        if os.path.exists(secondary_exe):
-            logger.warn('Not overwriting existing %s script %s (you must use %s)'
-                        % (expected_exe, secondary_exe, py_executable))
-        else:
-            logger.notify('Also creating executable in %s' % secondary_exe)
-            shutil.copyfile(sys.executable, secondary_exe)
-            make_exe(secondary_exe)
-
-    if '.framework' in prefix:
-        if 'Python.framework' in prefix:
-            logger.debug('MacOSX Python framework detected')
-            # Make sure we use the embedded interpreter inside
-            # the framework, even if sys.executable points to
-            # the stub executable in ${sys.prefix}/bin
-            # See http://groups.google.com/group/python-virtualenv/
-            #                              browse_thread/thread/17cab2f85da75951
-            original_python = os.path.join(
-                prefix, 'Resources/Python.app/Contents/MacOS/Python')
-        if 'EPD' in prefix:
-            logger.debug('EPD framework detected')
-            original_python = os.path.join(prefix, 'bin/python')
-        shutil.copy(original_python, py_executable)
-
-        # Copy the framework's dylib into the virtual
-        # environment
-        virtual_lib = os.path.join(home_dir, '.Python')
-
-        if os.path.exists(virtual_lib):
-            os.unlink(virtual_lib)
-        copyfile(
-            os.path.join(prefix, 'Python'),
-            virtual_lib)
-
-        # And then change the install_name of the copied python executable
-        try:
-            mach_o_change(py_executable,
-                          os.path.join(prefix, 'Python'),
-                          '@executable_path/../.Python')
-        except:
-            e = sys.exc_info()[1]
-            logger.warn("Could not call mach_o_change: %s. "
-                        "Trying to call install_name_tool instead." % e)
-            try:
-                call_subprocess(
-                    ["install_name_tool", "-change",
-                     os.path.join(prefix, 'Python'),
-                     '@executable_path/../.Python',
-                     py_executable])
-            except:
-                logger.fatal("Could not call install_name_tool -- you must "
-                             "have Apple's development tools installed")
-                raise
-
-    if not is_win:
-        # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist
-        py_exe_version_major = 'python%s' % sys.version_info[0]
-        py_exe_version_major_minor = 'python%s.%s' % (
-            sys.version_info[0], sys.version_info[1])
-        py_exe_no_version = 'python'
-        required_symlinks = [ py_exe_no_version, py_exe_version_major,
-                         py_exe_version_major_minor ]
-
-        py_executable_base = os.path.basename(py_executable)
-
-        if py_executable_base in required_symlinks:
-            # Don't try to symlink to yourself.
-            required_symlinks.remove(py_executable_base)
-
-        for pth in required_symlinks:
-            full_pth = join(bin_dir, pth)
-            if os.path.exists(full_pth):
-                os.unlink(full_pth)
-            os.symlink(py_executable_base, full_pth)
-
-    if is_win and ' ' in py_executable:
-        # There's a bug with subprocess on Windows when using a first
-        # argument that has a space in it.  Instead we have to quote
-        # the value:
-        py_executable = '"%s"' % py_executable
-    # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks
-    cmd = [py_executable, '-c', 'import sys;out=sys.stdout;'
-        'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))']
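-    # For a working environment the child simply echoes its sys.prefix, which
-    # must round-trip to home_dir, e.g. (hypothetical) b'/home/me/env'.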
-    logger.info('Testing executable with %s %s "%s"' % tuple(cmd))
-    try:
-        proc = subprocess.Popen(cmd,
-                            stdout=subprocess.PIPE)
-        proc_stdout, proc_stderr = proc.communicate()
-    except OSError:
-        e = sys.exc_info()[1]
-        if e.errno == errno.EACCES:
-            logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e))
-            sys.exit(100)
-        else:
-            raise e
-
-    proc_stdout = proc_stdout.strip().decode("utf-8")
-    proc_stdout = os.path.normcase(os.path.abspath(proc_stdout))
-    norm_home_dir = os.path.normcase(os.path.abspath(home_dir))
-    if hasattr(norm_home_dir, 'decode'):
-        norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding())
-    if proc_stdout != norm_home_dir:
-        logger.fatal(
-            'ERROR: The executable %s is not functioning' % py_executable)
-        logger.fatal(
-            'ERROR: It thinks sys.prefix is %r (should be %r)'
-            % (proc_stdout, norm_home_dir))
-        logger.fatal(
-            'ERROR: virtualenv is not compatible with this system or executable')
-        if is_win:
-            logger.fatal(
-                'Note: some Windows users have reported this error when they '
-                'installed Python for "Only this user" or have multiple '
-                'versions of Python installed. Copying the appropriate '
-                'PythonXX.dll to the virtualenv Scripts/ directory may fix '
-                'this problem.')
-        sys.exit(100)
-    else:
-        logger.info('Got sys.prefix result: %r' % proc_stdout)
-
-    pydistutils = os.path.expanduser('~/.pydistutils.cfg')
-    if os.path.exists(pydistutils):
-        logger.notify('Please make sure you remove any previous custom paths from '
-                      'your %s file.' % pydistutils)
-    ## FIXME: really this should be calculated earlier
-
-    fix_local_scheme(home_dir)
-
-    if site_packages:
-        if os.path.exists(site_packages_filename):
-            logger.info('Deleting %s' % site_packages_filename)
-            os.unlink(site_packages_filename)
-
-    return py_executable
-
-
-def install_activate(home_dir, bin_dir, prompt=None):
-    home_dir = os.path.abspath(home_dir)
-    if is_win or is_jython and os._name == 'nt':
-        files = {
-            'activate.bat': ACTIVATE_BAT,
-            'deactivate.bat': DEACTIVATE_BAT,
-            'activate.ps1': ACTIVATE_PS,
-        }
-
-        # MSYS needs paths of the form /c/path/to/file
-        drive, tail = os.path.splitdrive(home_dir.replace(os.sep, '/'))
-        home_dir_msys = (drive and "/%s%s" or "%s%s") % (drive[:1], tail)
-
-        # Run-time conditional enables (basic) Cygwin compatibility
-        home_dir_sh = ("""$(if [ "$OSTYPE" "==" "cygwin" ]; then cygpath -u '%s'; else echo '%s'; fi;)""" %
-                       (home_dir, home_dir_msys))
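-        # e.g. (hypothetical) 'C:\\env' yields the MSYS-style '/C/env', and the
-        # $OSTYPE test swaps in a cygpath -u conversion when running on Cygwin.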
-        files['activate'] = ACTIVATE_SH.replace('__VIRTUAL_ENV__', home_dir_sh)
-
-    else:
-        files = {'activate': ACTIVATE_SH}
-
-        # supplying activate.fish in addition to, not instead of, the
-        # bash script support.
-        files['activate.fish'] = ACTIVATE_FISH
-
-        # same for csh/tcsh support...
-        files['activate.csh'] = ACTIVATE_CSH
-
-    files['activate_this.py'] = ACTIVATE_THIS
-    if hasattr(home_dir, 'decode'):
-        home_dir = home_dir.decode(sys.getfilesystemencoding())
-    vname = os.path.basename(home_dir)
-    for name, content in files.items():
-        content = content.replace('__VIRTUAL_PROMPT__', prompt or '')
-        content = content.replace('__VIRTUAL_WINPROMPT__', prompt or '(%s)' % vname)
-        content = content.replace('__VIRTUAL_ENV__', home_dir)
-        content = content.replace('__VIRTUAL_NAME__', vname)
-        content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
-        writefile(os.path.join(bin_dir, name), content)
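-    # Illustrative substitution (values hypothetical): for home_dir
-    # '/home/me/env' on posix, __VIRTUAL_ENV__ becomes '/home/me/env',
-    # __VIRTUAL_NAME__ becomes 'env' and __BIN_NAME__ becomes 'bin'.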
-
-def install_distutils(home_dir):
-    distutils_path = change_prefix(distutils.__path__[0], home_dir)
-    mkdir(distutils_path)
-    ## FIXME: maybe this prefix setting should only be put in place if
-    ## there's a local distutils.cfg with a prefix setting?
-    home_dir = os.path.abspath(home_dir)
-    ## FIXME: this is breaking things, removing for now:
-    #distutils_cfg = DISTUTILS_CFG + "\n[install]\nprefix=%s\n" % home_dir
-    writefile(os.path.join(distutils_path, '__init__.py'), DISTUTILS_INIT)
-    writefile(os.path.join(distutils_path, 'distutils.cfg'), DISTUTILS_CFG, overwrite=False)
-
-def fix_local_scheme(home_dir):
-    """
-    Platforms that use the "posix_local" install scheme (like Ubuntu with
-    Python 2.7) need to be given an additional "local" location, sigh.
-    """
-    try:
-        import sysconfig
-    except ImportError:
-        pass
-    else:
-        if sysconfig._get_default_scheme() == 'posix_local':
-            local_path = os.path.join(home_dir, 'local')
-            if not os.path.exists(local_path):
-                os.mkdir(local_path)
-                for subdir_name in os.listdir(home_dir):
-                    if subdir_name == 'local':
-                        continue
-                    os.symlink(os.path.abspath(os.path.join(home_dir, subdir_name)),
-                               os.path.join(local_path, subdir_name))
-
-def fix_lib64(lib_dir):
-    """
-    Some platforms (particularly Gentoo on x64) put things in lib64/pythonX.Y
-    instead of lib/pythonX.Y.  If this is such a platform we'll just create a
-    symlink so lib64 points to lib
-    """
-    if [p for p in distutils.sysconfig.get_config_vars().values()
-        if isinstance(p, basestring) and 'lib64' in p]:
-        logger.debug('This system uses lib64; symlinking lib64 to lib')
-        assert os.path.basename(lib_dir) == 'python%s' % sys.version[:3], (
-            "Unexpected python lib dir: %r" % lib_dir)
-        lib_parent = os.path.dirname(lib_dir)
-        top_level = os.path.dirname(lib_parent)
-        lib_dir = os.path.join(top_level, 'lib')
-        lib64_link = os.path.join(top_level, 'lib64')
-        assert os.path.basename(lib_parent) == 'lib', (
-            "Unexpected parent dir: %r" % lib_parent)
-        if os.path.lexists(lib64_link):
-            return
-        os.symlink('lib', lib64_link)
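-
-# Hedged example (paths hypothetical): for lib_dir='/venv/lib/python2.7' this
-# creates the symlink /venv/lib64 -> lib, so lib64/python2.7 resolves to
-# lib/python2.7.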
-
-def resolve_interpreter(exe):
-    """
-    If the executable given isn't an absolute path, search $PATH for the interpreter
-    """
-    if os.path.abspath(exe) != exe:
-        paths = os.environ.get('PATH', '').split(os.pathsep)
-        for path in paths:
-            if os.path.exists(os.path.join(path, exe)):
-                exe = os.path.join(path, exe)
-                break
-    if not os.path.exists(exe):
-        logger.fatal('The executable %s (from --python=%s) does not exist' % (exe, exe))
-        raise SystemExit(3)
-    if not is_executable(exe):
-        logger.fatal('The executable %s (from --python=%s) is not executable' % (exe, exe))
-        raise SystemExit(3)
-    return exe
-
-def is_executable(exe):
-    """Checks a file is executable"""
-    return os.access(exe, os.X_OK)
-
-############################################################
-## Relocating the environment:
-
-def make_environment_relocatable(home_dir):
-    """
-    Makes the already-existing environment use relative paths, and takes out
-    the #!-based environment selection in scripts.
-    """
-    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
-    activate_this = os.path.join(bin_dir, 'activate_this.py')
-    if not os.path.exists(activate_this):
-        logger.fatal(
-            'The environment doesn\'t have a file %s -- please re-run virtualenv '
-            'on this environment to update it' % activate_this)
-    fixup_scripts(home_dir)
-    fixup_pth_and_egg_link(home_dir)
-    ## FIXME: need to fix up distutils.cfg
-
-OK_ABS_SCRIPTS = ['python', 'python%s' % sys.version[:3],
-                  'activate', 'activate.bat', 'activate_this.py']
-
-def fixup_scripts(home_dir):
-    # This is what we expect at the top of scripts:
-    shebang = '#!%s/bin/python' % os.path.normcase(os.path.abspath(home_dir))
-    # This is what we'll put:
-    new_shebang = '#!/usr/bin/env python%s' % sys.version[:3]
-    # path_locations() already accounts for the platform-specific bin/Scripts
-    # layout, so take bin_dir from it directly.
-    home_dir, lib_dir, inc_dir, bin_dir = path_locations(home_dir)
-    for filename in os.listdir(bin_dir):
-        filename = os.path.join(bin_dir, filename)
-        if not os.path.isfile(filename):
-            # ignore subdirs, e.g. .svn ones.
-            continue
-        f = open(filename, 'rb')
-        try:
-            try:
-                lines = f.read().decode('utf-8').splitlines()
-            except UnicodeDecodeError:
-                # This is probably a binary program instead
-                # of a script, so just ignore it.
-                continue
-        finally:
-            f.close()
-        if not lines:
-            logger.warn('Script %s is an empty file' % filename)
-            continue
-        if not lines[0].strip().startswith(shebang):
-            if os.path.basename(filename) in OK_ABS_SCRIPTS:
-                logger.debug('Cannot make script %s relative' % filename)
-            elif lines[0].strip() == new_shebang:
-                logger.info('Script %s has already been made relative' % filename)
-            else:
-                logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)'
-                            % (filename, shebang))
-            continue
-        logger.notify('Making script %s relative' % filename)
-        script = relative_script([new_shebang] + lines[1:])
-        f = open(filename, 'wb')
-        f.write('\n'.join(script).encode('utf-8'))
-        f.close()
-
-def relative_script(lines):
-    "Return a script that'll work in a relocatable environment."
-    activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); execfile(activate_this, dict(__file__=activate_this)); del os, activate_this"
-    # Find the last future statement in the script. If we insert the activation
-    # line before a future statement, Python will raise a SyntaxError.
-    activate_at = None
-    for idx, line in reversed(list(enumerate(lines))):
-        if line.split()[:3] == ['from', '__future__', 'import']:
-            activate_at = idx + 1
-            break
-    if activate_at is None:
-        # Activate after the shebang.
-        activate_at = 1
-    return lines[:activate_at] + ['', activate, ''] + lines[activate_at:]
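-
-# A hedged sketch of what relative_script() does, on hypothetical input
-# (the caller has already swapped in the relative shebang):
-#
-#     ['#!/usr/bin/env python2.7', 'from __future__ import division', 'main()']
-#
-# becomes
-#
-#     ['#!/usr/bin/env python2.7', 'from __future__ import division',
-#      '', <activate one-liner>, '', 'main()']
-#
-# i.e. the activation one-liner lands after the last __future__ import.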
-
-def fixup_pth_and_egg_link(home_dir, sys_path=None):
-    """Makes .pth and .egg-link files use relative paths"""
-    home_dir = os.path.normcase(os.path.abspath(home_dir))
-    if sys_path is None:
-        sys_path = sys.path
-    for path in sys_path:
-        if not path:
-            path = '.'
-        if not os.path.isdir(path):
-            continue
-        path = os.path.normcase(os.path.abspath(path))
-        if not path.startswith(home_dir):
-            logger.debug('Skipping system (non-environment) directory %s' % path)
-            continue
-        for filename in os.listdir(path):
-            filename = os.path.join(path, filename)
-            if filename.endswith('.pth'):
-                if not os.access(filename, os.W_OK):
-                    logger.warn('Cannot write .pth file %s, skipping' % filename)
-                else:
-                    fixup_pth_file(filename)
-            if filename.endswith('.egg-link'):
-                if not os.access(filename, os.W_OK):
-                    logger.warn('Cannot write .egg-link file %s, skipping' % filename)
-                else:
-                    fixup_egg_link(filename)
-
-def fixup_pth_file(filename):
-    lines = []
-    f = open(filename)
-    # Strip lines up front: readlines() keeps trailing newlines, which made
-    # the "no changes" comparison below never match the stripped lines
-    # accumulated in `lines`.
-    prev_lines = [line.strip() for line in f.readlines()]
-    f.close()
-    for line in prev_lines:
-        if (not line or line.startswith('#') or line.startswith('import ')
-            or os.path.abspath(line) != line):
-            lines.append(line)
-        else:
-            new_value = make_relative_path(filename, line)
-            if line != new_value:
-                logger.debug('Rewriting path %s as %s (in %s)' % (line, new_value, filename))
-            lines.append(new_value)
-    if lines == prev_lines:
-        logger.info('No changes to .pth file %s' % filename)
-        return
-    logger.notify('Making paths in .pth file %s relative' % filename)
-    f = open(filename, 'w')
-    f.write('\n'.join(lines) + '\n')
-    f.close()
-
-def fixup_egg_link(filename):
-    f = open(filename)
-    link = f.readline().strip()
-    f.close()
-    if os.path.abspath(link) != link:
-        logger.debug('Link in %s already relative' % filename)
-        return
-    new_link = make_relative_path(filename, link)
-    logger.notify('Rewriting link %s in %s as %s' % (link, filename, new_link))
-    f = open(filename, 'w')
-    f.write(new_link)
-    f.close()
-
-def make_relative_path(source, dest, dest_is_directory=True):
-    """
-    Make a filename relative, where the filename is dest, and it is
-    being referred to from the filename source.
-
-        >>> make_relative_path('/usr/share/something/a-file.pth',
-        ...                    '/usr/share/another-place/src/Directory')
-        '../another-place/src/Directory'
-        >>> make_relative_path('/usr/share/something/a-file.pth',
-        ...                    '/home/user/src/Directory')
-        '../../../home/user/src/Directory'
-        >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
-        './'
-    """
-    source = os.path.dirname(source)
-    if not dest_is_directory:
-        dest_filename = os.path.basename(dest)
-        dest = os.path.dirname(dest)
-    dest = os.path.normpath(os.path.abspath(dest))
-    source = os.path.normpath(os.path.abspath(source))
-    dest_parts = dest.strip(os.path.sep).split(os.path.sep)
-    source_parts = source.strip(os.path.sep).split(os.path.sep)
-    while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
-        dest_parts.pop(0)
-        source_parts.pop(0)
-    full_parts = ['..']*len(source_parts) + dest_parts
-    if not dest_is_directory:
-        full_parts.append(dest_filename)
-    if not full_parts:
-        # Special case for the current directory (otherwise it'd be '')
-        return './'
-    return os.path.sep.join(full_parts)
-
-
-
-############################################################
-## Bootstrap script creation:
-
-def create_bootstrap_script(extra_text, python_version=''):
-    """
-    Creates a bootstrap script, which is like this script but with
-    extend_parser, adjust_options, and after_install hooks.
-
-    This returns a string that (written to disk of course) can be used
-    as a bootstrap script with your own customizations.  The script
-    will be the standard virtualenv.py script, with your extra text
-    added (your extra text should be Python code).
-
-    If you include these functions, they will be called:
-
-    ``extend_parser(optparse_parser)``:
-        You can add or remove options from the parser here.
-
-    ``adjust_options(options, args)``:
-        You can change options here, or change the args (if you accept
-        different kinds of arguments, be sure you modify ``args`` so it is
-        only ``[DEST_DIR]``).
-
-    ``after_install(options, home_dir)``:
-
-        After everything is installed, this function is called.  This
-        is probably the function you are most likely to use.  An
-        example would be::
-
-            def after_install(options, home_dir):
-                subprocess.call([join(home_dir, 'bin', 'easy_install'),
-                                 'MyPackage'])
-                subprocess.call([join(home_dir, 'bin', 'my-package-script'),
-                                 'setup', home_dir])
-
-        This example immediately installs a package, and runs a setup
-        script from that package.
-
-    If you provide something like ``python_version='2.5'`` then the
-    script will start with ``#!/usr/bin/env python2.5`` instead of
-    ``#!/usr/bin/env python``.  You can use this when the script must
-    be run with a particular Python version.
-    """
-    filename = __file__
-    if filename.endswith('.pyc'):
-        filename = filename[:-1]
-    f = codecs.open(filename, 'r', encoding='utf-8')
-    content = f.read()
-    f.close()
-    py_exe = 'python%s' % python_version
-    content = (('#!/usr/bin/env %s\n' % py_exe)
-               + '## WARNING: This file is generated\n'
-               + content)
-    return content.replace('##EXT' 'END##', extra_text)
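-
-# Hedged usage sketch for create_bootstrap_script (the file names here are
-# hypothetical, not part of virtualenv):
-#
-#     extra = open('bootstrap_hooks.py').read()  # defines after_install() etc.
-#     open('my-bootstrap.py', 'w').write(
-#         create_bootstrap_script(extra, python_version='2.7'))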
-
-
-
-import os
-import subprocess
-import sys
-
-default_target_dir = 'venv'
-
-# Strip trailing newlines and skip blank lines so pip never receives an
-# empty or newline-terminated package name.
-pip_install_packages = [line.strip() for line in open('requirements.txt')
-                        if line.strip()]
-
-def adjust_options(options, args):
-    if len(args) == 0:
-        os.chdir(os.path.dirname(__file__))
-        args.append(default_target_dir)
-
-def after_install(options, home_dir):
-    from os.path import join
-    pip = join(home_dir, 'bin/pip')
-    if not os.path.exists(pip):
-        # on windows
-        pip = join(home_dir, 'Scripts/pip.exe')
-    if not os.path.exists(pip):
-        print('error: %s is missing' % pip)
-        return
-    if sys.version_info < (2, 7):
-        subprocess.call([pip, 'install', 'importlib'])
-    for prog in pip_install_packages:
-        subprocess.call([pip, 'install', prog])
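-
-# requirements.txt is consumed one package name per line; a hypothetical
-# example of its contents:
-#
-#     numpy
-#     requests==2.6.0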
-
-
-
-def convert(s):
-    b = base64.b64decode(s.encode('ascii'))
-    return zlib.decompress(b).decode('utf-8')
-
-##file site.py
-SITE_PY = convert("""
-eJzFPf1z2zaWv/OvwMqToZTIdOK0vR2nzo2TOK3v3MTbpLO5dT1aSoIs1hTJEqRl7c3d337vAwAB
-kpLtTXdO04klEnh4eHhfeHgPHQwGJ0Uhs7lY5fM6lULJuJwtRRFXSyUWeSmqZVLO94u4rDbwdHYT
-X0slqlyojYqwVRQET7/yEzwVn5eJMijAt7iu8lVcJbM4TTciWRV5Wcm5mNdlkl2LJEuqJE6Tf0CL
-PIvE06/HIDjLBMw8TWQpbmWpAK4S+UJcbKplnolhXeCcX0Tfxi9HY6FmZVJU0KDUOANFlnEVZFLO
-AU1oWSsgZVLJfVXIWbJIZrbhOq/TuSjSeCbF3//OU6OmYRiofCXXS1lKkQEyAFMCrALxgK9JKWb5
-XEZCvJGzGAfg5w2xAoY2xjVTSMYsF2meXcOcMjmTSsXlRgyndUWACGUxzwGnBDCokjQN1nl5o0aw
-pLQea3gkYmYPfzLMHjBPHL/LOYDjxyz4JUvuxgwbuAfBVUtmm1IukjsRI1j4Ke/kbKKfDZOFmCeL
-BdAgq0bYJGAElEiT6UFBy/G9XqHXB4SV5coYxpCIMjfml9QjCs4qEacK2LYukEaKMH8np0mcATWy
-WxgOIAJJg75x5omq7Dg0O5EDgBLXsQIpWSkxXMVJBsz6UzwjtP+aZPN8rUZEAVgtJX6rVeXOf9hD
-AGjtEGAc4GKZ1ayzNLmR6WYECHwG7Eup6rRCgZgnpZxVeZlIRQAAtY2Qd4D0WMSl1CRkzjRyOyb6
-E02SDBcWBQwFHl8iSRbJdV2ShIlFApwLXPH+48/i3embs5MPmscMMJbZ6xXgDFBooR2cYABxUKvy
-IM1BoKPgHP+IeD5HIbvG8QGvpsHBvSsdDGHuRdTu4yw4kF0vrh4G5liBMqGxAur339BlrJZAn/+5
-Z72D4GQbVWji/G29zEEms3glxTJm/kLOCL7XcF5HRbV8BdygEE4FpFK4OIhggvCAJC7NhnkmRQEs
-liaZHAVAoSm19VcRWOFDnu3TWrc4ASCUQQYvnWcjGjGTMNEurFeoL0zjDc1MNwnsOq/ykhQH8H82
-I12UxtkN4aiIofjbVF4nWYYIIS8E4V5IA6ubBDhxHolzakV6wTQSIWsvbokiUQMvIdMBT8q7eFWk
-cszii7p1txqhwWQlzFqnzHHQsiL1SqvWTLWX9w6jLy2uIzSrZSkBeD31hG6R52MxBZ1N2BTxisWr
-WufEOUGPPFEn5AlqCX3xO1D0RKl6Je1L5BXQLMRQwSJP03wNJDsKAiH2sJExyj5zwlt4B/8CXPw3
-ldVsGQTOSBawBoXIbwOFQMAkyExztUbC4zbNym0lk2SsKfJyLksa6mHEPmDEH9gY5xp8yCtt1Hi6
-uMr5KqlQJU21yUzY4mVhxfrxFc8bpgGWWxHNTNOGTiucXlos46k0LslULlAS9CK9sssOYwY9Y5It
-rsSKrQy8A7LIhC1Iv2JBpbOoJDkBAIOFL86Sok6pkUIGEzEMtCoI/ipGk55rZwnYm81ygAqJzfcM
-7A/g9g8Qo/UyAfrMAAJoGNRSsHzTpCrRQWj0UeAbfdOfxwdOPVto28RDLuIk1VY+zoIzenhaliS+
-M1lgr7EmhoIZZhW6dtcZ0BHFfDAYBIFxhzbKfM1VUJWbI2AFYcaZTKZ1goZvMkFTr3+ogEcRzsBe
-N9vOwgMNYTp9ACo5XRZlvsLXdm6fQJnAWNgj2BMXpGUkO8geJ75C8rkqvTBN0XY77CxQDwUXP5++
-P/ty+kkci8tGpY3b+uwKxjzNYmBrsgjAVK1hG10GLVHxJaj7xHsw78QUYM+oN4mvjKsaeBdQ/1zW
-9BqmMfNeBqcfTt6cn05++XT68+TT2edTQBDsjAz2aMpoHmtwGFUEwgFcOVeRtq9Bpwc9eHPyyT4I
-JomafPcNsBs8GV7LCpi4HMKMxyJcxXcKGDQcU9MR4thpABY8HI3Ea3H49OnLQ4JWbIoNAAOz6zTF
-hxNt0SdJtsjDETX+jV36Y1ZS2n+7PPrmShwfi/C3+DYOA/ChmqbMEj+ROH3eFBK6VvBnmKtREMzl
-AkTvRqKADp+SXzziDrAk0DLXdvq3PMnMe+ZKdwjSH0PqAThMJrM0VgobTyYhEIE69HygQ8TONUrd
-EDoWG7frSKOCn1LCwmbYZYz/9KAYT6kfosEoul1MIxDX1SxWklvR9KHfZII6azIZ6gFBmEliwOFi
-NRQK0wR1VpmAX0uchzpsqvIUfyJ81AIkgLi1Qi2Ji6S3TtFtnNZSDZ1JARGHwxYZUdEmivgRXJQh
-WOJm6UajNjUNz0AzIF+agxYtW5TDzx74O6CuzCYON3q892KaIab/wTsNwgFczhDVvVItKKwdxcXp
-hXj5/HAf3RnYc84tdbzmaKGTrJb24QJWy8gDI8y9jLy4dFmgnsWnR7thriK7Ml1WWOglLuUqv5Vz
-wBYZ2Fll8TO9gZ05zGMWwyqCXid/gFWo8Rtj3Ify7EFa0HcA6q0Iill/s/R7HAyQmQJFxBtrIrXe
-9bMpLMr8NkFnY7rRL8FWgrJEi2kcm8BZOI/J0CSChgAvOENKrWUI6rCs2WElvBEk2ot5o1gjAneO
-mvqKvt5k+Tqb8E74GJXucGRZFwVLMy82aJZgT7wHKwRI5rCxa4jGUMDlFyhb+4A8TB+mC5SlvQUA
-AkOvaLvmwDJbPZoi7xpxWIQxeiVIeEuJ/sKtGYK2WoYYDiR6G9kHRksgJJicVXBWNWgmQ1kzzWBg
-hyQ+151HvAX1AbSoGIHZHGpo3MjQ7/IIlLM4d5WS0w8t8pcvX5ht1JLiK4jYFCeNLsSCjGVUbMCw
-JqATjEfG0RpigzU4twCmVpo1xf4nkRfsjcF6XmjZBj8AdndVVRwdHKzX60hHF/Ly+kAtDr7983ff
-/fk568T5nPgHpuNIiw61RQf0Dj3a6HtjgV6blWvxY5L53EiwhpK8MnJFEb8f6mSei6P9kdWfyMWN
-mcZ/jSsDCmRiBmUqA20HDUZP1P6T6KUaiCdknW3b4Yj9Em1SrRXzrS70qHLwBMBvmeU1muqGE5R4
-BtYNduhzOa2vQzu4ZyPND5gqyunQ8sD+iyvEwOcMw1fGFE9QSxBboMV3SP8zs01M3pHWEEheNFGd
-3fOmX4sZ4s4fLu/W13SExswwUcgdKBF+kwcLoG3clRz8aNcW7Z7j2pqPZwiMpQ8M82rHcoiCQ7jg
-WoxdqXO4Gj1ekKY1q2ZQMK5qBAUNTuKUqa3BkY0MESR6N2azzwurWwCdWpFDEx8wqwAt3HE61q7N
-Co4nhDxwLF7QEwku8lHn3XNe2jpNKaDT4lGPKgzYW2i00znw5dAAGItB+cuAW5ptysfWovAa9ADL
-OQaEDLboMBO+cX3Awd6gh506Vn9bb6ZxHwhcpCHHoh4EnVA+5hFKBdJUDP2e21jcErc72E6LQ0xl
-lolEWm0Rrrby6BWqnYZpkWSoe51FimZpDl6x1YrESM1731mgfRA+7jNmWgI1GRpyOI2OydvzBDDU
-7TB8dl1joMGNwyBGq0SRdUMyLeEfcCsovkHBKKAlQbNgHipl/sT+AJmz89VftrCHJTQyhNt0mxvS
-sRgajnm/J5CMOhoDUpABCbvCSK4jq4MUOMxZIE+44bXcKt0EI1IgZ44FITUDuNNLb4ODTyI8ASEJ
-Rch3lZKFeCYGsHxtUX2Y7v5DudQEIYZOA3IVdPTi2I1sOFGN41aUw2doP75BZyVFDhw8BZfHDfS7
-bG6Y1gZdwFn3FbdFCjQyxWEGIxfVK0MYN5j8p2OnRUMsM4hhKG8g70jHjDQK7HJr0LDgBoy35u2x
-9GM3YoF9h2GuDuXqDvZ/YZmoWa5Cipm0YxfuR3NFlzYW2/NkOoA/3gIMRlceJJnq+AVGWf6JQUIP
-etgH3ZsshkXmcblOspAUmKbfsb80HTwsKT0jd/CJtlMHMFGMeB68L0FA6OjzAMQJNQHsymWotNvf
-BbtzigMLl7sPPLf58ujlVZe4420RHvvpX6rTu6qMFa5WyovGQoGr1TXgqHRhcnG20YeX+nAbtwll
-rmAXKT5++iKQEBzXXcebx029YXjE5t45eR+DOui1e8nVmh2xCyCCWhEZ5SB8PEc+HNnHTm7HxB4B
-5FEMs2NRDCTNJ/8MnF0LBWPszzcZxtHaKgM/8Pq7byY9kVEXye++GdwzSosYfWI/bHmCdmROKtg1
-21LGKbkaTh8KKmYN69g2xYj1OW3/NI9d9ficGi0b++5vgR8DBUPqEnyE5+OGbN2p4sd3p7bC03Zq
-B7DObtV89mgRYG+fT3+DHbLSQbXbOEnpXAEmv7+PytVs7jle0a89PEg7FYxDgr79l7p8DtwQcjRh
-1J2OdsZOTMC5ZxdsPkWsuqjs6RyC5gjMywtwjz+7ULUFM4z7nI8XDntUkzfjPmfia9Qqfv4QDWSB
-eTQY9JF9Kzv+f8zy+b9mkg+cijm5/gOt4SMB/VEzYePB0LTx8GH1L7trdw2wB5inLW7nDrewOzSf
-VS6Mc8cqSYmnqLueijWlK1BsFU+KAMqc/b4eOLiM+tD7bV2WfHRNKrCQ5T4ex44FZmoZz6/XxOyJ
-gw+yQkxssxnFqp28nrxPjYQ6+mxnEjb7hn45W+YmZiWz26SEvqBwh+GPH386DftNCMZxodPDrcjD
-/QaE+wimDTVxwsf0YQo9pss/L1XtrYtPUJMRYCLCmmy99sEPBJs4Qv8a3BMR8g5s+Zgdd+izpZzd
-TCSlDiCbYlcnKP4WXyMmNqPAz/9S8YKS2GAms7RGWrHjjdmHizqb0flIJcG/0qnCmDpECQEc/luk
-8bUYUuc5hp40N1J06jYutfdZlDkmp4o6mR9cJ3Mhf6/jFLf1crEAXPDwSr+KeHiKQIl3nNPASYtK
-zuoyqTZAgljl+uyP0h+chtMNT3ToIcnHPExATIg4Ep9w2vieCTc35DLBAf/EAyeJ+27s4CQrRPQc
-3mf5BEedUI7vmJHqnsvT46A9Qg4ABgAU5j8Y6cid/0bSK/eAkdbcJSpqSY+UbqQhJ2cMoQxHGOng
-3/TTZ0SXt7Zgeb0dy+vdWF63sbzuxfLax/J6N5auSODC2qCVkYS+wFX7WKM338aNOfEwp/Fsye0w
-9xNzPAGiKMwG28gUp0B7kS0+3yMgpLadA2d62OTPJJxUWuYcAtcgkfvxEEtv5k3yutOZsnF0Z56K
-cWe35RD5fQ+iiFLFptSd5W0eV3HkycV1mk9BbC264wbAWLTTiThWmt1OphzdbVmqwcV/ff7x4wds
-jqAGJr2BuuEiomHBqQyfxuW16kpTs/krgB2ppZ+IQ900wL0HRtZ4lD3+5x1leCDjiDVlKOSiAA+A
-srpsMzf3KQxbz3WSlH7OTM6HTcdikFWDZlJbiHRycfHu5PPJgEJ+g/8duAJjaOtLh4uPaWEbdP03
-t7mlOPYBodaxrcb4uXPyaN1wxP021oDt+PCtB4cPMdi9YQJ/lv9SSsGSAKEiHfx9DKEevAf6qm1C
-hz6GETvJf+7JGjsr9p0je46L4oh+37FDewD/sBP3GBMggHahhmZn0GymWkrfmtcdFHWAPtDX++ot
-WHvr1d7J+BS1k+hxAB3K2mbb3T/vnIaNnpLVm9Mfzj6cn725OPn8o+MCoiv38dPBoTj96Yug/BA0
-YOwTxZgaUWEmEhgWt9BJzHP4r8bIz7yuOEgMvd6dn+uTmhWWumDuM9qcCJ5zGpOFxkEzjkLbhzr/
-CDFK9QbJqSmidB2qOcL90orrWVSu86OpVGmKzmqtt166VszUlNG5dgTSB41dUjAITjGDV5TFXpld
-YckngLrOqgcpbaNtYkhKQcFOuoBz/mVOV7xAKXWGJ01nregvQxfX8CpSRZrATu5VaGVJd8P0mIZx
-9EN7wM149WlApzuMrBvyrLdigVbrVchz0/1HDaP9XgOGDYO9g3lnktJDKAMbk9tEiI34JCeUd/DV
-Lr1eAwULhgd9FS6iYboEZh/D5losE9hAAE8uwfriPgEgtFbCPxA4cqIDMsfsjPDtar7/l1ATxG/9
-6689zasy3f+bKGAXJDiVKOwhptv4HWx8IhmJ04/vRyEjR6m54i81lgeAQ0IBUEfaKX+JT9AnQyXT
-hc4v8fUBvtB+Ar1udS9lUeru/a5xiBLwRA3Ja3iiDP1CTPeysMc4lVELNFY+WMywgtBNQzCfPfFp
-KdNU57ufvTs/Bd8RizFQgvjc7RSG43gJHqHr5DuucGyBwgN2eF0iG5fowlKSxTzymvUGrVHkqLeX
-l2HXiQLD3V6dKHAZJ8pFe4jTZlimnCBCVoa1MMvKrN1qgxR22xDFUWaYJSYXJSWw+jwBvExPY94S
-wV4JSz1MBJ5PkZOsMhmLaTIDPQoqFxTqGIQEiYv1jMR5ecYx8LxUpgwKHhabMrleVni6AZ0jKsHA
-5j+dfDk/+0BlCYcvG6+7hznHtBMYcxLJMaYIYrQDvrhpf8hVk0kfz+pXCAO1D/xpv+LslGMeoNOP
-A4v4p/2K69COnZ0gzwAUVF20xQM3AE63PrlpZIFxtftg/LgpgA1mPhiKRWLZi070cOfX5UTbsmVK
-KO5jXj7iAGdR2JQ03dlNSWt/9BwXBZ5zzYf9jeBtn2yZzxS63nTebEt+cz8dKcSSWMCo29ofw2SH
-dZrq6TjMto1baFurbeyvmRMrddrNMhRlIOLQ7TxymaxfCevmzIFeGnUHmPheo2sksVeVD37NBtrD
-8DCxxO7sU0xHKmMhI4CRDKlrf2rwodAigAKh7N+hI7nj0dNDb46ONbh/jlp3gW38ERShzsWlGo+8
-BE6EL7+z48ivCC3Uo0cidDyVTGa5zRPDz3qJXuULf469MkBBTBS7Ms6u5ZBhjQ3MZz6xt4RgSdt6
-pL5MrvoMizgD5/RuC4d35aL/4MSg1mKETrsbuWmrI5882KC3FGQnwXzwZbwG3V/U1ZBXcss5dG8t
-3Xao90PE7ENoqk/fhyGGY34Pt6xPA7iXGhoWeni/bzmF5bUxjqy1j62qptC+0B7srIStWaXoWMYp
-TjS+qPUCGoN73Jj8gX2qE4Xs7546MScmZIHy4C5Ib24D3aAVThhwuRJXjiaUDt9U0+h3c3krUzAa
-YGSHWO3wm612GEU2nNKbB/bV2F1sLjb9uNGbBrMjU46BnpkqYP2iTFYHiE5vxGcXZg0yuNS/6i1J
-nN2Ql/z2r2dj8fbDz/DvG/kRTCkWP47F3wAN8TYvYX/J1bt0rQJWclS8ccxrhRWSBI2OKvgGCnTb
-Ljw647GILjHxa0usphSYVVuu+NoTQJEnSBXtjZ9gCifgt6nsanmjxlPsW5SBfok02F7sggUiB7pl
-tKxWKdoLJ0rSrObl4Pzs7emHT6dRdYccbn4OnCiKn5CF09FnxCWeh42FfTKr8cmV4zj/KNOix2/W
-m05TOIObThHCvqSwG02+UiO2m4u4xMiBKDbzfBZhS2B5rtWr1uBIj5z95b2G3rOyCGs40qdojTeP
-j4Ea4te2IhpAQ+qj50Q9CaF4ikVj/Dga9JvisaDQNvx5erOeu5FxXf1DE2xj2sx66He3unDJdNbw
-LCcRXsd2GUxBaJrEajWduYWCHzOhb0QBLUfnHHIR12klZAaSS5t8upoCNL1b28cSwqzC5owK3ihM
-k67jjXKSkGIlBjjqgKrr8UCGIoawB/8pvmF7gEWHouZaaIBOiNL+KXe6qnq2ZAnmLRFRryfxYJ1k
-L918Hk1hHpR3yLPGkYV5otvIGF3LSs+fHwxHly+aTAeKSs+8yt5ZAVbPZZM9UJ3F06dPB+Lf7/d+
-GJUozfMbcMsAdq/Xck6vt1huPTm7Wl3P3ryJgB9nS3kJD64oem6f1xmFJnd0pQWR9q+BEeLahJYZ
-TfuWXeagXckHzdyCD6y05fglS+jeIwwtSVS2+vooDDsZaSKWBMUQxmqWJCGHKWA9NnmNRXkYZtT8
-Iu+A4xMEM8a3eELGW+0lepiUQGu5x6JzLAYEeEC5ZTwaVTVTWRrgObnYaDQnZ1lSNfUkz93DU30X
-QGWvM9J8JeI1SoaZR4sYTn2nx6qNh53vZFFvx5LPLt2AY2uW/Po+3IG1QdLyxcJgCg/NIs1yWc6M
-OcUVS2ZJ5YAx7RAOd6ZbnMj6REEPSgNQ72QV5lai7ds/2XVxMf1I58j7ZiSdPlTZm7E4OBRnrQTD
-KGrGpzCUJaTlW/NlBKN8oLC29gS8scSfdFAViwm8CzzcusY60xdzcP5Gc1sHwKHLoKyCtOzo6Qjn
-BjILn5l2y3Ua+KEtOuF2m5RVHacTff/DBB22iT1Y13jaeridlZ7WWwEnPwcPeF+n7oPjYLJskJ6Y
-emtKM47FQocoIrfEzK/GKnL08g7ZVwKfAikzn5jCaBNEurTsaitOdc6mo+IR1DNTxbTFMzflM53K
-ExfzMeU5mbqHLV60waV9kYV4fSyGL8bi29ZGaFZs8GInQPnJPHoyD32fjLpeHh02dqa78WxB2Ark
-5dWjp5smU5pe2Jdzfn9fnXSIG8AVyM4ikfP9JwqxY5y/FqqG0sxrO6fQjLEkfc9mPelq7KZGhUrR
-puDVrxuF4qgW43/aQUyZt9YDXBGLQssWyFbxm8STVvKfvbcNEwM1ev7Koucy6Tucwm94Wwq81wR1
-HZ2th5Y6rd6C7dmT69pJPoJqGjYcf69H9ShRaueId1rh8WQjcS7rP4KHQ7pZhpjmWetY+F/JPJy0
-v+1wsYPld9/swtNVML1lEj0Lurt2gZe6XbDQLLf59Ie6PEbp6/pVAuNAaUQHvD5z+SP5a0eYD8y3
-uuQ2L3iF1yvSWS/allS6/gfvSfkeLXQIaBNO6VmwFuCS1As8mr2l2yJPFKWR4aUv3xy+GJtaWwak
-J/AyevlMX6pI3cx1Ar6zOtabIHip+x1G/+YASyq/t33V2RbQtI5btyv5g4UUjxpFE0uHxnLcX1nR
-rFks8BbChpjspNorNd6D2zAFh8FcJ5qD5wM7u6gPXVdjNNK7TbVtEeCtwUP72SY5D+raKFJEepew
-bVOeuxTno0VB9+q3ILgXR85fxvwGfaq6OLKxKmNT8Cxx6OZH4qe66a3kYnuCxrW6CXdNn/vvmrtu
-EdiZm/SAztz9ik2XBrrvdivaRwOOE2hCPKjooNH4/cbEtQNjnZXSH/PWHyS/2wlnusWs3AfG5MBg
-BJ3YU2NvzP4qnrnfMcVqn684dgt0e52N1rQ7NqPN8Q/xFDidBJ/bmn3KEZprDuSNB91ZN+Gs04m8
-vlaTGO9LnNBulTKkOtsQs/95T9fdyVhtzLYFrwECEIabdC6rm64OjAG6ku9t5gQj574XQUNTGq6T
-16uSOZsEvUcCcBGHHqm/CW1zYu4glRgxVnVZlLCtHOjbfTnzpS9ZuAFqImGrWN0Y1E2Psb7slRQr
-pVuZol4OeLbSZoAIbMQ7pmEyse+AV543FxckY8sMMqtXsoyr5tIe/4w9Ea+dEaiMGxfXiXM1Utni
-EhexxPKGgxRGmuz3Z7BD83anO24qGFlt93B2oh46dvqYSxAcY2S4OLmzF/a5F0XN6bJo1zu0zRqu
-s5cUwTKY2+dIR+qgE7/VN2Lxra0cEkf/0uEfkHe3ltHP67bqjL1bi4bzzFUI3SuQsAafjHPfzYYd
-DujeYdjaodrxfX1hGaXjYW5pbKmoffJehdOMNmpCMZiCeU8oxk+zf2QoxoP/wFCMvocSDI3GR+uB
-3sT7e2I2rB7cSx0bRoA+EyASHgm3rgQ0pnLoprEXuUruBvaKZtaVTm2cMQ/Ikd3bvggEX96o3Jxf
-73K1XaEYX7ro8Q/nH9+cnBMtJhcnb//z5AdKc8Jzh5atenCsKsv3mdr7XkK1G7fSqSl9gzfY9ty5
-ylVBGkLnfedUvwdCfwVY34K2FZn7eluHTiVNtxMgvnvaLajbVHYv5I5fpqs23ISUVuZzoJ9ymqr5
-5Zz1m0fmyIvFoTnSMu+bUwgto50g7baFcxJGu+pE+6v6Xs0tAeSRTVumFcDDB+Qve/ZgalBshJsd
-lPb/OINyrbF+z9xJA1I4k87diHQtIoOq/P9DRwnKLsa9HTuKY3vbNbXjcxZlr3HHQ9SZjAxBvAK6
-QXd+rrDPZbqFCkHACk/f/MeIGP2nTybtOf4TJS73qVR3H5XNlf2Fa6ad278meFpf2Ru0FKf88Hkl
-NF7UqXsCb/t0OpDTR8c6+cKpDQHNdwB0bsRTAXujv8QKcboRIWwctUuG6aZER339nYM82k0He0Or
-52J/WyGnW8goxIvtDeetWknd45B7qHt6qNqUyzkWGPMet1VoitcEmc8FBV2Z5TkfeBitt/3w9fby
-xZGN0iO/42tHkVB+1sAx7JdOfuPOaxqd7sQs5ZgS4HCv5tT36hZXDlT2CbbtbTpFHlv2PyZhgCEN
-vPf9ITPTw7vMftDG1LLeEUxJDJ+oEU3LKYvRuNsno+50G7XVBcIlPg8A0lGBAAvBdHSjk3K54bzp
-4XO9G5zWdMGte1QTOlJB6Vc+R3AP4/s1+LW7U2nug7oziqY/N2hzoF5yEG72HbjVyAuFbDcJ7ak3
-fLDFBeAq5/7+Lx7Qv5sYaLsf7vKrbauXvZV17MtiLimm2LRIZB5HYGRAbw5JW2MBghF0vNiloaPL
-UM3ckC/Q8aP8VLy+mjYY5MxOtAdgjULwf2RtvCc=
-""")
-
-##file ez_setup.py
-EZ_SETUP_PY = convert("""
-eJzNWmmP20YS/a5fwSgYSIJlDu9DhrzIJg5gIMgGuYCFPavpc8SYIhWS8li7yH/f181DJDWcJIt8
-WAbOzJDN6qpXVa+qWvr8s+O52ufZbD6f/z3Pq7IqyNEoRXU6VnmelkaSlRVJU1IlWDR7K41zfjIe
-SVYZVW6cSjFcq54WxpGwD+RBLMr6oXk8r41fTmWFBSw9cWFU+6ScySQV6pVqDyHkIAyeFIJVeXE2
-HpNqbyTV2iAZNwjn+gW1oVpb5Ucjl/VOrfzNZjYzcMkiPxji3zt930gOx7yolJa7i5Z63fDWcnVl
-WSF+PUEdgxjlUbBEJsz4KIoSIKi9L6+u1e9YxfPHLM0Jnx2SosiLtZEXGh2SGSStRJGRSnSLLpau
-9aYMq3hulLlBz0Z5Oh7Tc5I9zJSx5Hgs8mORqNfzo3KCxuH+fmzB/b05m/2oYNK4Mr2xkiiM4oTf
-S2UKK5KjNq/xqtby+FAQ3vejqYJh1oBXnsvZV2++/uKnb37c/fzm+x/e/uNbY2vMLTNgtj3vHv30
-/TcKV/VoX1XHze3t8XxMzDq4zLx4uG2Cory9KW/xX7fb7dy4UbuYDb7vNu7dbHbg/o6TikDgf7TH
-Fpc3XmJzar88nh3TNcXDw2JjLKLIcRiRsWU7vsUjL6JxHNBQOj4LRMDIYv2MFK+VQsOYRMSzXOH5
-liMpjXwhXGnHnh26PqMTUpyhLn7gh6Ef84gEPJLM86zQIjG3Qid0eBw/L6XTxYMBJOJ2EHOHiiCw
-JXEdEgjfEZ6MnCmL3KEulLo2syQL3TgmgeuHcRz6jPBY+sQK7OhZKZ0ubkQihrs8EIw7juOF0g5j
-GXISBLEkbEKKN9QlcCzPJ44nuCdsQVkYSmG5MSGeCGQo/GelXHBh1CF25EOPiBMmJXW4DX0sl7rU
-Zt7TUtgoXqgrHer7bswD+DWUoUd4GNsOBJHYiiYsYuN4gT1ccCAZhNzhjpTC9iwrdgNPOsSb8DSz
-raEyDHA4hPrcJZbjB54fwD/MdiPLIqEVW8+L6bTxQ44X4aOYRlYYOsyPie+SyHNd4nM+iUwtxm/F
-cOEFhEXAMg5ZFPt+6AhfRD7CUdCIhc+LCTptIoFMIkJaAQBymAg824M0B0YC8Alvg1SG2DiUCIIc
-tl2O95FGTiRCSnzqE2jExfNiLp7igRvLmFoQ5jHP8eLQcj0umCOYxZxJT9lDbAKPxZ50qQxJiCh0
-BYtcYVEH7g69mDrPi+mwoZLEjm1ZlMNNHDkBSYJzF44PPCsKJsSMeEZaVuBRGRDi0JBbUAvIeghs
-K7JD5kw5asQzgR3YsSMEc33phQJeswPGA2I7kOqEU1JGPCPtCAQF8uUSoUIcP2YxpEibhzSM5ARb
-sRHPCEvw0Asih8VxRCUNgXRkIXot+Dy0p5ztDp1EqJB2IDmHYb7v217k2SwEf/E4igN/SsqIrahF
-Y9u1CSPUdSyAAZ4LpecxH0QR2vJZKZ1FCBKJPQPuSSpdZBSVsRcwC1CB9cRUwHhDiyLF1iB+12Gc
-xix0KJMe6MsJpBMROcVW/tAiIWLJIwvqICERsdIV4HQ/BGHwyA6mPO0PLSISXMUlqoodWrYQADdE
-cfIpQ8EjwRTL+CMfRdyVAQjBY4yQKLQ9BA53Q8oYd7nPJ6QEQ4uQMBGqfGTbASpRFHmhAxGomL4X
-I7WniDMYVTfmB0T6IQW+6B6QDYEFQzzPRYL5ZIobgqFF1JERCX0HxR60S10UaQuu5sKXaCV8d0JK
-OKI7Cz6SMeHMJYHtC9+2faQhWooIFDgZL+GoEpBIxr6HKsDB5ZakQcikLR24AY+cqQwIhxZ5qLEE
-fCvRMiABPdezbVtyEbk2/oVTukSjbshSvZATA5GYo36oEASBR66lGivreSmdRYwSNwI3oOfwIpdZ
-KmYRbQCbobJMloFoaJEdOnYIkoOjY85s3/Jji/gRdQXyPPanPB0PLYLuzLPQzNgKYerFgfCYpMKK
-YCuzpjwdj5gBQYbGDrXVjSIegJ2IEFYA8mKB6031d42UziIp4FpX+MQOqe0wuIn5nk1D1F5UfjFV
-SeJhPWIEaWNLxZrEERzEZMcuKltI/dhBjwMpv816EwHGm3JWFedNPXDtSblPE9rOW+jdZ+ITExg1
-3uo7b9RI1KzFw/66GRfS2H0kaYJuX+xwawmddhnmwbWhBoDVRhuQSKO9r2bGdjyoH6qLJ5gtKowL
-SoR+0dyLT/VdzHftMshpVn627aS8a0XfXeSpC3MXpsHXr9V0UlZcFJjrloMV6porkxoLmvnwBlMY
-wRjGPzOM5Xd5WSY07Y1/GOnw9+Fvq/mVsJvOzMGj1eAvpY/4lFRLp75fwLlFpuGqAR0Nh3pRM15t
-R8PculNrR0kptr2Bbo1JcYdRdZuXJjsV+K0Opu4FLlJy3tr+rHESxsYvTlV+AA4M0+UZo2jGbzuz
-eycFaq4/kA/wJYbnj4CKKIAAnjLtSKp9Pc7fN0rfG+U+P6VcTbOkxrovrZ3Ms9OBisKo9qQyMAh3
-grUsNQFnCl1DYurtlDplXL8ijPsBEPeGGmmXj/uE7dvdBbRWRxO1PGNxu1iZULJG6V5tqeT0jjH2
-ohgckDwmmLnpJRIEXyMi6wDXKmc58EgLQfj5oj72eCt76mnY9XbN2YQWUzVaamlUaFUaQPSJBcsz
-XtbYtGocCQJFgQpEVFolVQLXZQ+984za4439eSb0eUJ9NsJrvQBqnioMnzwfUVo2hw2iEabPcor8
-hJ1ErUqdZ8Q4iLIkD6I+4Lgk3f29jpeCJKUwfjiXlTi8+aTwympHZAapcK8+2SBUUYsyXoWgMqY+
-9TDbCNU/H0m5q1kI9m+NxfHDw64QZX4qmCgXimHU9oecn1JRqlOSHoGOH9c5gazjiIMGtuXqwiQq
-5LaXpOnlZYPYKAXbtFuPEu3CAW2SmEBWFNXSWqtNeiTXEHW306v+6Q5tj/l2jWN2mpi3SkbtIBD7
-WNYAIP3wCYbvXmoJqQ9I8+h6h4Foswmu5fyi8evt/EUD1epVI7uvwlDAz/XKL/NMpgmrAM2mz/59
-z/9Ztp//uL9E/0S8L19vb8pVl8ttDuujzPfZkPDnjGSLSqVUlyLgDHV8p3OkOa5T2XLKMoSyaXyX
-CkRIu/xKnsohlcogIAFbWg1lUpQA4lSqdFhAwrl1vfHyp57yC3Mk7332Plt+eSoKSAOd1wJuilHd
-WqFqXWJZmKR4KN9Zd8/XrCd991WCwEzoSdXRb/Pq6xzs3AsUUpazJtvS4ZvrfkK+G6XznXrlc4Ci
-CT//MKiZ/RCti+dTmfpXV1CVz8i4Qen86ok6qTOTXHjeSHNWdxmaEWsbkqo+9NVdw/9p3axZVx3r
-t3Xz98qmuqd2va6ZNZXfX8rgRKnL6wLX1jdVJ1h1IunFiKZuDGtD+6lBgfJBHUTWHvGY1kHbtqBb
-o8dPL29KtNM3peqm5/1cGJ1q14EPuf1yoDAzXgy7vpJ8FNB+iy675vlf8iRbtlWhXVqLKwumxOnW
-91sU6LZbVuzTvo68K6tyWYtdbVQyfPExT1QAHQVRJbBVp+ySbUDR6tKhyCFIoVG2KKX5w2CV6q+V
-X4bvqgsrzUdSZEuF88u/7qo/9Gi4siHn8qkov9EhoT4MWYqPIlN/wJwjlJ3tRXpUrdzbOtp67UQX
-Kug3VPyrj2uWCooZWH5tgKpm6tYB6ZwJAIlXkIeqmQXpikdFsQQTalnqt/u0rknZnDVbgo2btuWy
-I1TmbTSbs9kSjCg2CmEt5kDYXnVQPBd1rdnDvVCiesyLD82ma+NYF4ycVqT5qE0xhWaJG5CpYhEg
-wHQjrhdA8iUTm8wpRFOA+gaYq7/SiwiK9VXI9Ej3qkfSUbZW2XT1GpoEHaxVoobFphdKhTi+qn8s
-R+3UMDpbGtalrpzrLUalTKdcww8mfuZHkS2vln1ufI8+/vaxSCqQD3wMfHUHDQ7/sFaf9j0q76kO
-gBUqDUGNLC+Kkw6OVIyEab/3w0M11pXQ61tObK/mk7OpuRoGmGrGWK6GGtcsoq2puWI9f6RzwIkH
-prajnqy7lzDfqTlvM6YAbLDRu7A0L8VydUURZbXRQvvPm2rWkhYUTNUvLW3N/sil6vcBkb5ED/Jx
-PVWxLzX37XOfg+oa+wbdUrOqLRBP9cejz5efa47reaDj6iuJlzXPzwx6+Lauu6zhZDAYDLTPVGr0
-xgGWHw4w1By0he0JDWlmrPZqfKQhTlELNM6rF+oA5W6lw/RRLAod1sJQZfx3Q0VZqnAe1Sql9nUN
-waJThqHuw7IzS6TlsMHvmbbbNWjtdsYWU55lWqa9+NNd/z9B8Jpc1ahLyzwVyNWJabft41FM6l79
-qkcvxCH/qPlWe6L+GoMealE5KlBv+ju8O2q+J7vsJql+HTYrvWGq3+1cz3d/YEbDz2ea+dEgtpmO
-9v85JJ9Ls07w70q5iuan8q5Nt7vhGK7BtlYIfFilqj8cx3SkqCdPR6ja5S8CoFNfa37BZbCldqAO
-8/kPV23RfN0yyhwk+KALUaFOdBGEaJIuAT1/Qt5i+T3aqXn7hRvzeB4OlPP6qzTX3zYxV4vmpPLY
-1ad2hCkv9PyTfmqoFKGnJK1e1ke/EPmgJsWzYuR+FBfN/KN6rfaouBN7AUT33JfuWv2pViwvXbUW
-0tZCXTQXBV1cnnUnx+rdu+bUWbZF9cmTZ9kVu3oErEv0u7n646bY4N8aXIHxoek064as3chE8T2U
-y9Vd97JZwuKudB7VUDGf15NCXaT7wMADGCGrdmLQXxHatnfNB1HVSavuL/uT9E53DLtdE/UdJI2M
-taFhedW0RC0Ar8bGHkiFaXALPc1SkILtl/P3Wf8rPu+z5bt//Xb3YvXbXLcnq/4Yo9/ucdETjI1C
-rr9klRpCscBn8+skbRmxVhX/f7fRgk3dei/t1R3GMA3kC/20fojRFY82d0+bv3hsYkI27VGneg+A
-GcxocdxuF7udStjdbtF9sJEqiVBT5/BrR5fD9u939h3eefkSYNWp0itfvdzpljubu6fqouaIi0y1
-qL7+C1AkCcw=
-""")
-
-##file distribute_from_egg.py
-DISTRIBUTE_FROM_EGG_PY = convert("""
-eJw9j8tqAzEMRfcG/4MgmxQyptkGusonZBmGoGTUGYFfWPKE6dfXTkM3gqt7rh47OKP3NMF3SQFW
-LlrRU1zhybpAxoKBlIqcrNnBdRjQP3GTocYfzmNrrCPQPN9iwzpxSQfQhWBi0cL3qtRtYIG/4Mv0
-KApY5hooqrOGQ05FQTaxptF9Fnx16Rq0XofjaE1XGXVxHIWK7j8P8EY/rHndLqQ1a0pe3COFgHFy
-hLLdWkDbi/DeEpCjNb3u/zccT2Ob8gtnwVyI
-""")
-
-##file distribute_setup.py
-DISTRIBUTE_SETUP_PY = convert("""
-eJztPGtz2ziS3/UrcHK5SOUkxs7MzV25TlOVmTizrs0mKdvZ/ZC4aIiEJI75GpC0ov311403SEp2
-LrMfruq8O7ZENBqNfncDzMm/1ft2W5WT6XT6S1W1TctpTdIM/marrmUkK5uW5jltMwCaXK3JvurI
-jpYtaSvSNYw0rO3qtqryBmBxlJOaJg90w4JGDkb1fk5+75oWAJK8Sxlpt1kzWWc5oocvgIQWDFbl
-LGkrvie7rN2SrJ0TWqaEpqmYgAsibFvVpFrLlTT+i4vJhMDPmleFQ30sxklW1BVvkdrYUivg/Ufh
-bLBDzv7ogCxCSVOzJFtnCXlkvAFmIA126hw/A1Ra7cq8oumkyDiv+JxUXHCJloTmLeMlBZ5qILvj
-uVg0Aai0Ik1FVnvSdHWd77NyM8FN07rmVc0znF7VKAzBj/v7/g7u76PJ5BbZJfibiIURIyO8g88N
-biXhWS22p6QrqKw3nKauPCNUioliXtXoT822a7PcfNubgTYrmP68LgvaJlszxIoa6THfKXe/wo5q
-yhs2mRgB4hqNllxebSaTlu8vrJCbDJVTDn+6ubyOb65uLyfsa8JgZ1fi+SVKQE4xEGRJ3lclc7Dp
-fXQr4HDCmkZqUsrWJJa2ESdFGr6gfNPM5BT8wa+ALIT9R+wrS7qWrnI2n5F/F0MGjgM7eemgjxJg
-eCiwkeWSnE0OEn0CdgCyAcmBkFOyBiFJgsir6Ic/lcgT8kdXtaBr+LgrWNkC69ewfAmqasHgEWKq
-wRsAMQWSHwDMD68Cu6QmCxEy3ObMH1N4Avgf2D6MD4cdtgXT02YakFMEHMApmP6Q2vRnS4FgHXxQ
-KzZ3felUTdTUFIwyhE8f43+8vrqdkx7TyAtXZm8u377+9O42/vvl9c3Vh/ew3vQs+in64cepGfp0
-/Q4fb9u2vnj5st7XWSRFFVV881L5yOZlA34sYS/Tl9ZtvZxObi5vP328/fDh3U389vVfL9/0FkrO
-z6cTF+jjX3+Lr96//YDj0+mXyd9YS1Pa0sXfpbe6IOfR2eQ9uNkLx8InZvS0mdx0RUHBKshX+Jn8
-pSrYogYKxffJ6w4o5+7nBStolssn77KElY0CfcOkfxF48QEQBBI8tKPJZCLUWLmiEFzDCv7OtW+K
-ke3LcDbTRsG+QoxKhLaKcCDhxWBb1OBSgQfa30TFQ4qfwbPjOPiRaEd5GQaXFgkoxWkTzNVkCVjl
-abxLARHow4a1yS5VGIzbEFBgzFuYE7pTBRQVREgnF1U1K/W2LEys9qH27E2OkrxqGIYja6GbShGL
-mzaBwwCAg5FbB6Jq2m6j3wFeETbHhzmol0Pr57O72XAjEosdsAx7X+3IruIPLsc0tEOlEhqGrSGO
-KzNI3hhlD2aufymr1vNogY7wsFygkMPHF65y9DyMXe8GdBgyB1huBy6N7HgFH9OOa9Vxc5vIoaOH
-hTEBzdAzkwJcOFgFoavqkfUnoXJmbVJBGNWu+5UHoPyNfLjOSlh9TJ+k+lncMuRGvGg5Y0bblOGs
-ugzA2WYTwn9zYuynrWIE+3+z+T9gNkKGIv6WBKQ4gugXA+HYDsJaQUh5W04dMqPFH/h7hfEG1UY8
-WuA3+MUdRH+Kksr9Sb3XusdZ0+Wtr1pAiARWTkDLAwyqaRsxbGngNIOc+uqDSJbC4Neqy1MxS/BR
-Wutmg9apbCSFLamkO1T5+9yk4fGKNkxv23mcspzu1arI6L6SKPjABu7FabOo96dpBP9Hzo6mNvBz
-SiwVmGaoLxAD1xVo2MjD87vZ89mjjAYINntxSoQD+z9Ea+/nAJes1j3hjgSgyCKRfPDAjLfh2ZxY
-+at83C/UnKpkpctUnTLEoiBYCsOR8u4VRWrHy17S1uPA0kncRrkhd7BEA+j4CBOW5/8xB+HEa/rA
-lre8Y8b3FlQ4gKaDSnIn0nmho3TVVDmaMfJiYpdwNA1A8G/ocm9Hm1hyiaGvDeqHTQwmJfLIRqTV
-yN+iSrucNVjafTG7CSxX+oBDP+19cUTjrecDSOXc0oa2LQ89QDCUOHWi/mhZgLMVB8frAjHkl+x9
-EOUcbDVlIA4VWmamjM7f4y0OM89jRqT6CuHUsuTn5RTqMrXebISw/j58jCqV/7Uq13mWtP7iDPRE
-1jOJ8CfhDDxKX3SuXg25j9MhFEIWFO04FN/hAGJ6K3y72FjqtkmcdlL48/IUiqisEaKmj1BCiOrq
-Szkd4sPuT0LLoMVEShk7YN5tsbMhWkKqkwGfeFdifInIx5yBgEbx6W4HJUXFkdQE00JN6DrjTTsH
-4wQ0o9MDQLzXTocsPjn7CqIR+C/llzL8teMcVsn3EjE55TNA7kUAFmEWi5nFUJml0LI2fOWPsbwZ
-sRDQQdIzOsfCP/c8xR1OwdgselHVw6EC+1vs4VlR5JDNjOq1yXZg1fdV+7bqyvS7zfZJMsdIHKRC
-xxxWnHBGW9b3VzFuTligybJExDoSqL83bImfkdilQpZyxFCkv7FtSWOvIrSa5icYX14lol4SrVnF
-+ayV3caSFkxmjfeK9nvICkVytsIW6iPNMw+7Nr2yK1aMg0lTYcvGLQhc2LIUWbFo45jeKaiBmMLI
-vcePe4KNlxCcRLLVq7MylZET+8qUBC+DWUTuJU/ucUWvOAAHwzjTWaSp5PQqLI3kHgUHzXS1B9EV
-TqoyFf3ZmmKsX7E1+htsxSZtR3PbJRb7a7HUaiMthn9JzuCFIyHUjkMlvhKBiGFrXvXIeY5118Qx
-x9Fw6aB4NTa33fwzRnXAfpSXH0dYp23+iR5QSV824rmXrqIgIRhqLDIFpI8MWHogC9egKsHkCaKD
-fal+r2OuvdRZop1dIM9fP1YZanWNppsacmySM4jqpn4x1iOcfDOd45Z8ny2JUlwKB8Mn5JrR9KUI
-rgQjDORnQDpZgck9zPFUYIdKiOFQ+hbQ5KTiHNyFsL4eMtit0GptLxmez7RMwGsV1j/YKcQMgSeg
-DzTtJVWSjYJoyaw5me5W0wGQygsQmR0bOE0lCVhrJMcAAnQN34MH/CPxDhZ14W07V0gY9pILS1Ay
-1tUgOOwG3Neq+hquuzJBd6a8oBh2x0XTd05evHjYzY5kxvJIwtYoarq2jDfatdzI58eS5j4s5s1Q
-ao8lzEjtY1bJBtag+e/+1LRpBgP9lSJcByQ9fG4WeQYOAwuYDs+r8XRIlC9YKD0jtbET3lIAeHZO
-3593WIZKebRGeKJ/Up3VMkO6jzNoVASjad04pKv1rt5qTRdkxegdQjSEOTgM8AFla4P+P0R0o8lD
-Vwt/sZa5NSvlliC265C01k4AMc1UhAAXCg4vVmgBYu16kLVnncCm4YSlJsmy7gS8HyLZa66OtMNe
-+xBuI1axw6qJnfURobFKiPQESDQxasTCTdiNeXsFC9wFY2FUOTzN0/EkcT3moYTSTxzxwHqu23FG
-jNfCM3LNt1FpfreAFHFHhKRpGXBNUlCynY76+BQieBB9ePcmOm3wDA/PhyP8NWgrXyM6GTgxaxLt
-TLlDjVH1l7Fwxq/h2KgiXz+0tBbVIyTiYHSx2/EP65wmbAtmxHSXvJchZA32OYdgPvGfygeIsd5h
-AuR0ahPO3MMKusaaxvNsmOnq+xFOE3qcFKBaHbdH6m+Ic+dut+cF9iMXWHj0A4lefOCHV6AnDy5b
-1n7pZTlg+6+iOnDvELjr9hgw6SnB36pHVAGWM3kAXXUtZtPolHZ0b01WV1D9TNBhzpxIy1HE9+Sp
-5jt8sEFCGR4QHXuw0pq8yDSYJN2smjEnI6ezqqeu+DmIGZYXYAe07+HmxKdmVJVOAPOO5KwNGoJq
-b3x6n59GzRS/UdNCtz047zUW1eEB3rvAjw73NIZj8lAw3llfv4etQHp1tOtqBliGucKYVoJPlocC
-wFZNrOLEgRZ9cGNvNaVOAyLo7cR354c8Td+5H4Izrp6uIVE3J+JIgOKKEwARxNzfMT1xYySW+VgI
-AQY8kAOPXhRARVytfg/Nceos0o30GopNqOhkZHyqgeH5NkX4t8zxXK5LLyjlSJ32lBseEbfmju5Z
-DF2QYNX+UTAJjE4FqvDZZzKy2LQbVaHcsSN1JNRYPwgLfPG0Ljx0NWIuafsGt9cjZeABNS+HLnDU
-90jwI56n78N/RfnLQD6Y5edOJlcx/tIkWSqlvywfM16VaGy9vN4turEc3kJ5R2rGi6xp9M04WUaf
-Ygf0IatroGl6ZBtD+lRuN+rEBcDhPE+KqzWJ3WFxOXoSwYSgnxf12NluHalaDqrHT6WpHhlOI7Cv
-M0/v7ykz7/m7Z7mTycyvWUwEttnliYprEA6TB9TqDL+N1QoHbUVm85e//bZASWI8A6nKz99gK9kg
-Gz8a9A8FqOcGeaunTqA/ULgA8cWD4Zv/6CgrZk94mSc5d8yi/zTTcljhlVBKW8arKDVoL8yIdqwJ
-r4PQ+ots1x6MrSNnkAqz6EnHNWfr7Guoo44NdCbiijCljl8p3zxe9PyRTcbVZUYN+Fl/gJCdsq9O
-DIda6/zizmR1YniuLz2ysisYp/I6pNsjQlB5nVjmf4sFh93KGyFyG/1yAbYBOCJYlbcN9tNRj5cY
-1CSekQZUW9VKOGJmnWdtGOA6y2D2edE7h3SYoBnoLqZw9Q/DJFVYqEoqRg+Xc1BOeYfzZ8mf8V6Z
-R27zWUAid4d0fiutlkpgb9cwHohTFHs5WR2LYsd6tDc1toqZPWIdUisH6tpX+JuEisNT54xVX08d
-M+CD1wCO9eJOyI4FYFUJkDCSdDj5Nqikc8MprZhkSsNYgYHdPQoetn3E1x2ajF+8qDtYyIbhhpxw
-hJkyTN41EWaR/hm3j/FaHnRjehKJy+u96okzEepxfCnctq+zXqpzu6/ZgF/YjHXOyl5/vPpXEmyp
-s0VqfxlQT1813Xtu7osgbskk2wbjgjohKWuZuk+I8RzvIJigiHqb9jNsc/647JMX6aG+drsvqDhF
-mVwadF03a0ZWUbwQpynSN6J6Ct+YfRXE1rx6zFKWyndVsrWCd9+KaZzWSKquIhZze5qjG61uPeSH
-kjHKxqWgsAFD532CAZE8BBq7hDv0bfJ+PtCyherocAXlZWZgo1KOjXuRUW1pZBMRK1MVRMR9uQOb
-KhfynqMVnkcHWvvhLt+oVPVkRRrgGPO3I00f5yrsYZIOJVEjpBzPqRSJ4aGUFHXO75Z8Q1p6MC89
-0lvv8cafN+yuu7phzizRrMXBuvSQ4pDb8f4l64vWLwi+V55DeiEmFTUQyZxDgZx2ZbK1mZ190g+e
-12rE2zhGO1mWinfIJIToSeiXjCRUndWkoPwBbzJUhIrjZ2onrLqNKp6K9BzfaQkWiX8RHhIJvFaU
-s4VqTSzYV/GaGSTQi4KWEMPT4M4geXUICWdJxTWkes9HJJwXP9xhwiIpAFcyNvDKCaV6+OzO9EGw
-Xegms5/9N2vuILnS0yYah7jzNPrSlBGJcxG8YflanhgspxHU+QXDuxjNEqOVPepSl9fF2bqCkAe3
-4l4FBxFKeeHXRF7b0ne39f7sHRH09vjKX7UrsZIvqhRfDpSRBc84BIDbk7CHoBpJBuotOn2gSGkT
-kXvcQGDu2uCbeoB0zQQhg6vrQKjiAHyEyWpHAfp4mQTTXBBR4JuX4v4N8FOQLFqfGg+eLSj7gOi0
-2pMNaxWucOZfSlGJX1LVe/c7VH1QW6h7lpKh8gq/BlCMt5cxXQ6APtyZjEOLZZBp6AGM+vl6Yuoc
-WEl4WohVCsQr09Ww6vz3PN6JJsyjR90RauiaoVRZ76aEhYxoDeVuGqo1fCep6VoKbkX46ygg3tHD
-XtGPP/6XTIuSrAD5ifoMCDz7z7MzJ/vL15GSvUYqtd+kK9cM3QEjDbLfpdm1b7eZSf6bhK/m5EeH
-RWhkOJ/xEDCczxHPq9loXZIUtYCJsCUhASN7LtfnGyINJeZxAC6pD8dOXQaIHth+qTUwwhsUoL9I
-c4AEBDNMxAU2eSNbMwiSQnF5BnAZEzZmi7or5IFZYp95Pa1zxj0ixfnnaBNFS9xn0OA6gpBysgXi
-rIwV3tkQsBPnqs8ATLawsyOAuvnqmOz/4iqxVFGcnAP3cyi4z4fFtrio3Svkx65+CGRxutqEoIRT
-5VvwlUW8RMZ670G5L4aF6k1pGwLE31/MSyL2bVfwpoF6uVbHLGK6NZV+e8gUY6o89r2js7L0aooZ
-iooIK35Nn+elDhjjT4cytKnsHui71g35qF8L/glDNOSjjPeuZ8lL8Tf7pmXFJcbWcydpcgjXTk03
-KLymggtomrVgWpLZPS5/xBEZS+WhE0Sakjkdp8YDF4jELUb1Lnj0QUAJNFy5AgkU0TSNJQ5b72qC
-8WJr0y4Dl9nwkIo7PcugabH114IrEJBr2uWqPLd3Z7csr5c6PUIbF8wWL5wruZPwGOtnwXOo1Rfz
-FnjX0ZDt3YAMMJNp6SPly+mn63dTS6KmfPTur6Rf/3MDmNTgjVgRmNXN1speCxxXbLUDJai5ztzU
-jlyh60S2Av6onMMYFcUu6qYEjqeuGmnxCw0qKDjGAzedrUZdHft3CoTPvqTNXkFpldL/TsLSV1PZ
-/zn6ipR/wVrbr/fUM4zhy8vHvBF4rExcM8RaLRbtwDhGPsSxepHeZMCCOzDhfwBqDMd7
-""")
-
-##file activate.sh
-ACTIVATE_SH = convert("""
-eJytVVFvokAQfudXTLEPtTlLeo9tvMSmJpq02hSvl7u2wRUG2QR2DSxSe7n/frOACEVNLlceRHa+
-nfl25pvZDswCnoDPQ4QoTRQsENIEPci4CsBMZBq7CAsuLOYqvmYKTTj3YxnBgiXBudGBjUzBZUJI
-BXEqgCvweIyuCjeG4eF2F5x14bcB9KQiQQWrjSddI1/oQIx6SYYeoFjzWIoIhYI1izlbhJjkKO7D
-M/QEmKfO9O7WeRo/zr4P7pyHwWxkwitcgwpQ5Ej96OX+PmiFwLeVjFUOrNYKaq1Nud3nR2n8nI2m
-k9H0friPTGVsUdptaxGrTEfpNVFEskxpXtUkkCkl1UNF9cgLBkx48J4EXyALuBtAwNYIjF5kcmUU
-abMKmMq1ULoiRbgsDEkTSsKSGFCJ6Z8vY/2xYiSacmtyAfCDdCNTVZoVF8vSTQOoEwSnOrngBkws
-MYGMBMg8/bMBLSYKS7pYEXP0PqT+ZmBT0Xuy+Pplj5yn4aM9nk72JD8/Wi+Gr98sD9eWSMOwkapD
-BbUv91XSvmyVkICt2tmXR4tWmrcUCsjWOpw87YidEC8i0gdTSOFhouJUNxR+4NYBG0MftoCTD9F7
-2rTtxG3oPwY1b2HncYwhrlmj6Wq924xtGDWqfdNxap+OYxplEurnMVo9RWks+rH8qKEtx7kZT5zJ
-4H7oOFclrN6uFe+d+nW2aIUsSgs/42EIPuOhXq+jEo3S6tX6w2ilNkDnIpHCWdEQhFgwj9pkk7FN
-l/y5eQvRSIQ5+TrL05lewxWpt/Lbhes5cJF3mLET1MGhcKCF+40tNWnUulxrpojwDo2sObdje3Bz
-N3QeHqf3D7OjEXMVV8LN3ZlvuzoWHqiUcNKHtwNd0IbvPGKYYM31nPKCgkUILw3KL+Y8l7aO1ArS
-Ad37nIU0fCj5NE5gQCuC5sOSu+UdI2NeXg/lFkQIlFpdWVaWZRfvqGiirC9o6liJ9FXGYrSY9mI1
-D/Ncozgn13vJvsznr7DnkJWXsyMH7e42ljdJ+aqNDF1bFnKWFLdj31xtaJYK6EXFgqmV/ymD/ROG
-+n8O9H8f5vsGOWXsL1+1k3g=
-""")
-
-##file activate.fish
-ACTIVATE_FISH = convert("""
-eJyVVWFv2jAQ/c6vuBoqQVWC9nVSNVGVCaS2VC2rNLWVZZILWAs2s52wVvvxsyEJDrjbmgpK7PP5
-3bt3d22YLbmGlGcIq1wbmCPkGhPYcLMEEsGciwGLDS+YwSjlekngLFVyBe73GXSXxqw/DwbuTS8x
-yyKpFr1WG15lDjETQhpQuQBuIOEKY5O9tlppLqxHKSDByjVAPwEy+mXtCq5MzjIUBTCRgEKTKwFG
-gpBqxTLYXgN2myspVigMaYF92tZSowGZJf4mFExxNs9Qb614CgZtmH0BpEOn11f0cXI/+za8pnfD
-2ZjA1sg9zlV/8QvcMhxbNu0QwgYokn/d+n02nt6Opzcjcnx1vXcIoN74O4ymWQXmHURfJw9jenc/
-vbmb0enj6P5+cuVhqlKm3S0u2XRtRbA2QQAhV7VhBF0rsgUX9Ur1rBUXJgVSy8O751k8mzY5OrKH
-RW3eaQhYGTr8hrXO59ALhxQ83mCsDLAid3T72CCSdJhaFE+fXgicXAARUiR2WeVO37gH3oYHzFKo
-9k7CaPZ1UeNwH1tWuXA4uFKYYcEa8vaKqXl7q1UpygMPhFLvlVKyNzsSM3S2km7UBOl4xweUXk5u
-6e3wZmQ9leY1XE/Ili670tr9g/5POBBpGIJXCCF79L1siarl/dbESa8mD8PL61GpzqpzuMS7tqeB
-1YkALrRBloBMbR9yLcVx7frQAgUqR7NZIuzkEu110gbNit1enNs82Rx5utq7Z3prU78HFRgulqNC
-OTwbqJa9vkJFclQgZSjbKeBgSsUtCtt9D8OwAbIVJuewQdfvQRaoFE9wd1TmCuRG7OgJ1bVXGHc7
-z5WDL/WW36v2oi37CyVBak61+yPBA9C1qqGxzKQqZ0oPuocU9hpud0PIp8sDHkXR1HKkNlzjuUWA
-a0enFUyzOWZA4yXGP+ZMI3Tdt2OuqU/SO4q64526cPE0A7ZyW2PMbWZiZ5HamIZ2RcCKLXhcDl2b
-vXL+eccQoRzem80mekPDEiyiWK4GWqZmwxQOmPM0eIfgp1P9cqrBsewR2p/DPMtt+pfcYM+Ls2uh
-hALufTAdmGl8B1H3VPd2af8fQAc4PgqjlIBL9cGQqNpXaAwe3LrtVn8AkZTUxg==
-""")
-
-##file activate.csh
-ACTIVATE_CSH = convert("""
-eJx9VG1P2zAQ/u5fcYQKNgTNPtN1WxlIQ4KCUEGaxuQ6yYVYSuzKdhqVX7+zk3bpy5YPUXL3PPfc
-ne98DLNCWshliVDV1kGCUFvMoJGugMjq2qQIiVSxSJ1cCofD1BYRnOVGV0CfZ0N2DD91DalQSjsw
-tQLpIJMGU1euvPe7QeJlkKzgWixlhnAt4aoUVsLnLBiy5NtbJWQ5THX1ZciYKKWwkOFaE04dUm6D
-r/zh7pq/3D7Nnid3/HEy+wFHY/gEJydg0aFaQrBFgz1c5DG1IhTs+UZgsBC2GMFBlaeH+8dZXwcW
-VPvCjXdlAvCfQsE7al0+07XjZvrSCUevR5dnkVeKlFYZmUztG4BdzL2u9KyLVabTU0bdfg7a0hgs
-cSmUg6UwUiQl2iHrcbcVGNvPCiLOe7+cRwG13z9qRGgx2z6DHjfm/Op2yqeT+xvOLzs0PTKHDz2V
-tkckFHoQfQRXoGJAj9el0FyJCmEMhzgMS4sB7KPOE2ExoLcSieYwDvR+cP8cg11gKkVJc2wRcm1g
-QhYFlXiTaTfO2ki0fQoiFM4tLuO4aZrhOzqR4dIPcWx17hphMBY+Srwh7RTyN83XOWkcSPh1Pg/k
-TXX/jbJTbMtUmcxZ+/bbqOsy82suFQg/BhdSOTRhMNBHlUarCpU7JzBhmkKmRejKOQzayQe6MWoa
-n1wqWmuh6LZAaHxcdeqIlVLhIBJdO9/kbl0It2oEXQj+eGjJOuvOIR/YGRqvFhttUB2XTvLXYN2H
-37CBdbW2W7j2r2+VsCn0doVWcFG1/4y1VwBjfwAyoZhD
-""")
-
-##file activate.bat
-ACTIVATE_BAT = convert("""
-eJx9UdEKgjAUfW6wfxjiIH+hEDKUFHSKLCMI7kNOEkIf9P9pTJ3OLJ/03HPPPed4Es9XS9qqwqgT
-PbGKKOdXL4aAFS7A4gvAwgijuiKlqOpGlATS2NeMLE+TjJM9RkQ+SmqAXLrBo1LLIeLdiWlD6jZt
-r7VNubWkndkXaxg5GO3UaOOKS6drO3luDDiO5my3iA0YAKGzPRV1ack8cOdhysI0CYzIPzjSiH5X
-0QcvC8Lfaj0emsVKYF2rhL5L3fCkVjV76kShi59NHwDniAHzkgDgqBcwOgTMx+gDQQqXCw==
-""")
-
-##file deactivate.bat
-DEACTIVATE_BAT = convert("""
-eJxzSE3OyFfIT0vj4ipOLVEI8wwKCXX0iXf1C7Pl4spMU0hJTcvMS01RiPf3cYmHyQYE+fsGhCho
-cCkAAUibEkTEVhWLMlUlLk6QGixStlyaeCyJDPHw9/Pw93VFsQguim4ZXAJoIUw5DhX47XUM8UCx
-EchHtwsohN1bILUgw61c/Vy4AJYPYm4=
-""")
-
-##file activate.ps1
-ACTIVATE_PS = convert("""
-eJylWdmS40Z2fVeE/oHT6rCloNUEAXDThB6wAyQAEjsB29GBjdgXYiWgmC/zgz/Jv+AEWNVd3S2N
-xuOKYEUxM+/Jmzfvcm7W//zXf/+wUMOoXtyi1F9kbd0sHH/hFc2iLtrK9b3FrSqyxaVQwr8uhqJd
-uHaeg9mqzRdR8/13Pyy8qPLdJh0+LMhi0QCoXxYfFh9WtttEnd34H8p6/f1300KauwrULws39e18
-0ZaLNm9rgN/ZVf3h++/e124Vlc0vKsspHy+Yyi5+XbzPhijvCtduoiL/kA1ukWV27n0o7Sb8LIFj
-CvWR5GQgUJdp1Pw8TS9+rPy6SDv/+e3d+0+4qw8f3v20+PliV37efEYBAB9FTKC+RHn/Cfxn3rdv
-00Fube5O+iyCtHDs9BfPfz3q4sfFv9d91Ljhfy7ei0VO+nVTtdOkv/jpt0l2AX6iG1jXgKnnDuD4
-ke2k/i8fzzz5UedkVcP4pwF+Wvz2FJl+3vt598urXf5Y6LNA5WcFOP7r0sW7b9a+W/xcu0Xpv5zk
-Kfq3P9Dz9di/fCxS72MXVU1rpx9L4Bxl85Wmn5a+zP76Zuh3pL9ROWr87PN+//GHIl+oOtvn9XSU
-qH+p0gQBFnx1uV+JLH5O5zv+PXW+WepXVVHZT0+oQezkIATcIm+ivPV/z5J/+cYj3ir4w0Lx09vC
-e5n/y5/Y5LPPfdrqb88ga/PabxZRVfmp39l588m/6u+/e+OpP+dF7n1WZpJ9//Z4v372fDDz9eHB
-7Juvs/BLMHzrxL9+9twXpJfhd1/DrpQ5Euu/vlss3wp9HXC/54C/Ld69m6zwdx3tC0d8daSv0V8B
-n4b9YYF53sJelJV/ix6LZspw/sJtqyl5LJ5r/23htA1Imfm/gt9R7dqVB1LjhydAX4Gb+zksQF59
-9+P7H//U+376afFuvh2/T6P85Xr/5c8C6OXyFY4BGuN+EE0+GeR201b+wkkLN5mmBY5TfMw8ngqL
-CztXxCSXKMCYrRIElWkEJlEPYsSOeKBVZCAQTKBhApMwRFQzmCThE0YQu2CdEhgjbgmk9GluHpfR
-/hhwJCZhGI5jt5FsAkOrObVyE6g2y1snyhMGFlDY1x+BoHpCMulTj5JYWNAYJmnKpvLxXgmQ8az1
-4fUGxxcitMbbhDFcsiAItg04E+OSBIHTUYD1HI4FHH4kMREPknuYRMyhh3AARWMkfhCketqD1CWJ
-mTCo/nhUScoQcInB1hpFhIKoIXLo5jLpwFCgsnLCx1QlEMlz/iFEGqzH3vWYcpRcThgWnEKm0QcS
-rA8ek2a2IYYeowUanOZOlrbWSJUC4c7y2EMI3uJPMnMF/SSXdk6E495VLhzkWHps0rOhKwqk+xBI
-DhJirhdUCTamMfXz2Hy303hM4DFJ8QL21BcPBULR+gcdYxoeiDqOFSqpi5B5PUISfGg46gFZBPo4
-jdh8lueaWuVSMTURfbAUnLINr/QYuuYoMQV6l1aWxuZVTjlaLC14UzqZ+ziTGDzJzhiYoPLrt3uI
-tXkVR47kAo09lo5BD76CH51cTt1snVpMOttLhY93yxChCQPI4OBecS7++h4p4Bdn4H97bJongtPk
-s9gQnXku1vzsjjmX4/o4YUDkXkjHwDg5FXozU0fW4y5kyeYW0uJWlh536BKr0kMGjtzTkng6Ep62
-uTWnQtiIqKnEsx7e1hLtzlXs7Upw9TwEnp0t9yzCGgUJIZConx9OHJArLkRYW0dW42G9OeR5Nzwk
-yk1mX7du5RGHT7dka7N3AznmSif7y6tuKe2N1Al/1TUPRqH6E2GLVc27h9IptMLkCKQYRqPQJgzV
-2m6WLsSipS3v3b1/WmXEYY1meLEVIU/arOGVkyie7ZsH05ZKpjFW4cpY0YkjySpSExNG2TS8nnJx
-nrQmWh2WY3cP1eISP9wbaVK35ZXc60yC3VN/j9n7UFoK6zvjSTE2+Pvz6Mx322rnftfP8Y0XKIdv
-Qd7AfK0nexBTMqRiErvCMa3Hegpfjdh58glW2oNMsKeAX8x6YJLZs9K8/ozjJkWL+JmECMvhQ54x
-9rsTHwcoGrDi6Y4I+H7yY4/rJVPAbYymUH7C2D3uiUS3KQ1nrCAUkE1dJMneDQIJMQQx5SONxoEO
-OEn1/Ig1eBBUeEDRuOT2WGGGE4bNypBLFh2PeIg3bEbg44PHiqNDbGIQm50LW6MJU62JHCGBrmc9
-2F7WBJrrj1ssnTAK4sxwRgh5LLblhwNAclv3Gd+jC/etCfyfR8TMhcWQz8TBIbG8IIyAQ81w2n/C
-mHWAwRzxd3WoBY7BZnsqGOWrOCKwGkMMNfO0Kci/joZgEocLjNnzgcmdehPHJY0FudXgsr+v44TB
-I3jnMGnsK5veAhgi9iXGifkHMOC09Rh9cAw9sQ0asl6wKMk8mpzFYaaDSgG4F0wisQDDBRpjCINg
-FIxhlhQ31xdSkkk6odXZFpTYOQpOOgw9ugM2cDQ+2MYa7JsEirGBrOuxsQy5nPMRdYjsTJ/j1iNw
-FeSt1jY2+dd5yx1/pzZMOQXUIDcXeAzR7QlDRM8AMkUldXOmGmvYXPABjxqkYKO7VAY6JRU7kpXr
-+Epu2BU3qFFXClFi27784LrDZsJwbNlDw0JzhZ6M0SMXE4iBHehCpHVkrQhpTFn2dsvsZYkiPEEB
-GSEAwdiur9LS1U6P2U9JhGp4hnFpJo4FfkdJHcwV6Q5dV1Q9uNeeu7rV8PAjwdFg9RLtroifOr0k
-uOiRTo/obNPhQIf42Fr4mtThWoSjitEdAmFW66UCe8WFjPk1YVNpL9srFbond7jrLg8tqAasIMpy
-zkH0SY/6zVAwJrEc14zt14YRXdY+fcJ4qOd2XKB0/Kghw1ovd11t2o+zjt+txndo1ZDZ2T+uMVHT
-VSXhedBAHoJIID9xm6wPQI3cXY+HR7vxtrJuCKh6kbXaW5KkVeJsdsjqsYsOwYSh0w5sMbu7LF8J
-5T7U6LJdiTx+ca7RKlulGgS5Z1JSU2Llt32cHFipkaurtBrvNX5UtvNZjkufZ/r1/XyLl6yOpytL
-Km8Fn+y4wkhlqZP5db0rooqy7xdL4wxzFVTX+6HaxuQJK5E5B1neSSovZ9ALB8091dDbbjVxhWNY
-Ve5hn1VnI9OF0wpvaRm7SZuC1IRczwC7GnkhPt3muHV1YxUJfo+uh1sYnJy+vI0ZwuPV2uqWJYUH
-bmBsi1zmFSxHrqwA+WIzLrHkwW4r+bad7xbOzJCnKIa3S3YvrzEBK1Dc0emzJW+SqysQfdEDorQG
-9ZJlbQzEHQV8naPaF440YXzJk/7vHGK2xwuP+Gc5xITxyiP+WQ4x18oXHjFzCBy9kir1EFTAm0Zq
-LYwS8MpiGhtfxiBRDXpxDWxk9g9Q2fzPPAhS6VFDAc/aiNGatUkPtZIStZFQ1qD0IlJa/5ZPAi5J
-ySp1ETDomZMnvgiysZSBfMikrSDte/K5lqV6iwC5q7YN9I1dBZXUytDJNqU74MJsUyNNLAPopWK3
-tzmLkCiDyl7WQnj9sm7Kd5kzgpoccdNeMw/6zPVB3pUwMgi4C7hj4AMFAf4G27oXH8NNT9zll/sK
-S6wVlQwazjxWKWy20ZzXb9ne8ngGalPBWSUSj9xkc1drsXkZ8oOyvYT3e0rnYsGwx85xZB9wKeKg
-cJKZnamYwiaMymZvzk6wtDUkxmdUg0mPad0YHtvzpjEfp2iMxvORhnx0kCVLf5Qa43WJsVoyfEyI
-pzmf8ruM6xBr7dnBgzyxpqXuUPYaKahOaz1LrxNkS/Q3Ae5AC+xl6NbxAqXXlzghZBZHmOrM6Y6Y
-ctAkltwlF7SKEsShjVh7QHuxMU0a08/eiu3x3M+07OijMcKFFltByXrpk8w+JNnZpnp3CfgjV1Ax
-gUYCnWwYow42I5wHCcTzLXK0hMZN2DrPM/zCSqe9jRSlJnr70BPE4+zrwbk/xVIDHy2FAQyHoomT
-Tt5jiM68nBQut35Y0qLclLiQrutxt/c0OlSqXAC8VrxW97lGoRWzhOnifE2zbF05W4xuyhg7JTUL
-aqJ7SWDywhjlal0b+NLTpERBgnPW0+Nw99X2Ws72gOL27iER9jgzj7Uu09JaZ3n+hmCjjvZpjNst
-vOWWTbuLrg+/1ltX8WpPauEDEvcunIgTxuMEHweWKCx2KQ9DU/UKdO/3za4Szm2iHYL+ss9AAttm
-gZHq2pkUXFbV+FiJCKrpBms18zH75vax5jSo7FNunrVWY3Chvd8KKnHdaTt/6ealwaA1x17yTlft
-8VBle3nAE+7R0MScC3MJofNCCkA9PGKBgGMYEwfB2QO5j8zUqa8F/EkWKCzGQJ5EZ05HTly1B01E
-z813G5BY++RZ2sxbQS8ZveGPJNabp5kXAeoign6Tlt5+L8i5ZquY9+S+KEUHkmYMRFBxRrHnbl2X
-rVemKnG+oB1yd9+zT+4c43jQ0wWmQRR6mTCkY1q3VG05Y120ZzKOMBe6Vy7I5Vz4ygPB3yY4G0FP
-8RxiMx985YJPXsgRU58EuHj75gygTzejP+W/zKGe78UQN3yOJ1aMQV9hFH+GAfLRsza84WlPLAI/
-9G/5JdcHftEfH+Y3/fHUG7/o8bv98dzzy3e8S+XCvgqB+VUf7sH0yDHpONdbRE8tAg9NWOzcTJ7q
-TuAxe/AJ07c1Rs9okJvl1/0G60qvbdDzz5zO0FuPFQIHNp9y9Bd1CufYVx7dB26mAxwa8GMNrN/U
-oGbNZ3EQ7inLzHy5tRg9AXJrN8cB59cCUBeCiVO7zKM0jU0MamhnRThkg/NMmBOGb6StNeD9tDfA
-7czsAWopDdnGoXUHtA+s/k0vNPkBcxEI13jVd/axp85va3LpwGggXXWw12Gwr/JGAH0b8CPboiZd
-QO1l0mk/UHukud4C+w5uRoNzpCmoW6GbgbMyaQNkga2pQINB18lOXOCJzSWPFOhZcwzdgrsQnne7
-nvjBi+7cP2BbtBeDOW5uOLGf3z94FasKIguOqJl+8ss/6Kumns4cuWbqq5592TN/RNIbn5Qo6qbi
-O4F0P9txxPAwagqPlftztO8cWBzdN/jz3b7GD6JHYP/Zp4ToAMaA74M+EGSft3hEGMuf8EwjnTk/
-nz/P7SLipB/ogQ6xNX0fDqNncMCfHqGLCMM0ZzFa+6lPJYQ5p81vW4HkCvidYf6kb+P/oB965g8K
-C6uR0rdjX1DNKc5pOSTquI8uQ6KXxYaKBn+30/09tK4kMpJPgUIQkbENEPbuezNPPje2Um83SgyX
-GTCJb6MnGVIpgncdQg1qz2bvPfxYD9fewCXDomx9S+HQJuX6W3VAL+v5WZMudRQZk9ZdOk6GIUtC
-PqEb/uwSIrtR7/edzqgEdtpEwq7p2J5OQV+RLrmtTvFwFpf03M/VrRyTZ73qVod7v7Jh2Dwe5J25
-JqFOU2qEu1sP+CRotklediycKfLjeIZzjJQsvKmiGSNQhxuJpKa+hoWUizaE1PuIRGzJqropwgVB
-oo1hr870MZLgnXF5ZIpr6mF0L8aSy2gVnTAuoB4WEd4d5NPVC9TMotYXERKlTcwQ2KiB/C48AEfH
-Qbyq4CN8xTFnTvf/ebOc3isnjD95s0QF0nx9s+y+zMmz782xL0SgEmRpA3x1w1Ff9/74xcxKEPdS
-IEFTz6GgU0+BK/UZ5Gwbl4gZwycxEw+Kqa5QmMkh4OzgzEVPnDAiAOGBFaBW4wkDmj1G4RyElKgj
-NlLCq8zsp085MNh/+R4t1Q8yxoSv8PUpTt7izZwf2BTHZZ3pIZpUIpuLkL1nNL6sYcHqcKm237wp
-T2+RCjgXweXd2Zp7ZM8W6dG5bZsqo0nrJBTx8EC0+CQQdzEGnabTnkzofu1pYkWl4E7XSniECdxy
-vLYavPMcL9LW5SToJFNnos+uqweOHriUZ1ntIYZUonc7ltEQ6oTRtwOHNwez2sVREskHN+bqG3ua
-eaEbJ8XpyO8CeD9QJc8nbLP2C2R3A437ISUNyt5Yd0TbDNcl11/DSsOzdbi/VhCC0KE6v1vqVNkq
-45ZnG6fiV2NwzInxCNth3BwL0+8814jE6+1W1EeWtpWbSZJOJNYXmWRXa7vLnAljE692eHjZ4y5u
-y1u63De0IzKca7As48Z3XshVF+3XiLNz0JIMh/JOpbiNLlMi672uO0wYzOCZjRxcxj3D+gVenGIE
-MvFUGGXuRps2RzMcgWIRolHXpGUP6sMsQt1hspUBnVKUn/WQj2u6j3SXd9Xz0QtEzoM7qTu5y7gR
-q9gNNsrlEMLdikBt9bFvBnfbUIh6voTw7eDsyTmPKUvF0bHqWLbHe3VRHyRZnNeSGKsB73q66Vsk
-taxWYmwz1tYVFG/vOQhlM0gUkyvIab3nv2caJ1udU1F3pDMty7stubTE4OJqm0i0ECfrJIkLtraC
-HwRWKzlqpfhEIqYH09eT9WrOhQyt8YEoyBlnXtAT37WHIQ03TIuEHbnRxZDdLun0iok9PUC79prU
-m5beZzfQUelEXnhzb/pIROKx3F7qCttYIFGh5dXNzFzID7u8vKykA8Uejf7XXz//S4nKvW//ofS/
-QastYw==
-""")
-
-##file distutils-init.py
-DISTUTILS_INIT = convert("""
-eJytV1uL4zYUfvevOE0ottuMW9q3gVDa3aUMXXbLMlDKMBiNrSTqOJKRlMxkf33PkXyRbGe7Dw2E
-UXTu37lpxLFV2oIyifAncxmOL0xLIfcG+gv80x9VW6maw7o/CANSWWBwFtqeWMPlGY6qPjV8A0bB
-C4eKSTgZ5LRgFeyErMEeOBhbN+Ipgeizhjtnhkn7DdyjuNLPoCS0l/ayQTG0djwZC08cLXozeMss
-aG5EzQ0IScpnWtHSTXuxByV/QCmxE7y+eS0uxWeoheaVVfqSJHiU7Mhhi6gULbOHorshkrEnKxpT
-0n3A8Y8SMpuwZx6aoix3ouFlmW8gHRSkeSJ2g7hU+kiHLDaQw3bmRDaTGfTnty7gPm0FHbIBg9U9
-oh1kZzAFLaue2R6htPCtAda2nGlDSUJ4PZBgCJBGVcwKTAMz/vJiLD+Oin5Z5QlvDPdulC6EsiyE
-NFzb7McNTKJzbJqzphx92VKRFY1idenzmq3K0emRcbWBD0ryqc4NZGmKOOOX9Pz5x+/l27tP797c
-f/z0d+4NruGNai8uAM0bfsYaw8itFk8ny41jsfpyO+BWlpqfhcG4yxLdi/0tQqoT4a8Vby382mt8
-p7XSo7aWGdPBc+b6utaBmCQ7rQKQoWtAuthQCiold2KfJIPTT8xwg9blPumc+YDZC/wYGdAyHpJk
-vUbHbHWAp5No6pK/WhhLEWrFjUwtPEv1Agf8YmnsuXUQYkeZoHm8ogP16gt2uHoxcEMdf2C6pmbw
-hUMsWGhanboh4IzzmsIpWs134jVPqD/c74bZHdY69UKKSn/+KfVhxLgUlToemayLMYQOqfEC61bh
-cbhwaqoGUzIyZRFHPmau5juaWqwRn3mpWmoEA5nhzS5gog/5jbcFQqOZvmBasZtwYlG93k5GEiyw
-buHhMWLjDarEGpMGB2LFs5nIJkhp/nUmZneFaRth++lieJtHepIvKgx6PJqIlD9X2j6pG1i9x3pZ
-5bHuCPFiirGHeO7McvoXkz786GaKVzC9DSpnOxJdc4xm6NSVq7lNEnKdVlnpu9BNYoKX2Iq3wvgh
-gGEUM66kK6j4NiyoneuPLSwaCWDxczgaolEWpiMyDVDb7dNuLAbriL8ig8mmeju31oNvQdpnvEPC
-1vAXbWacGRVrGt/uXN/gU0CDDwgooKRrHfTBb1/s9lYZ8ZqOBU0yLvpuP6+K9hLFsvIjeNhBi0KL
-MlOuWRn3FRwx5oHXjl0YImUx0+gLzjGchrgzca026ETmYJzPD+IpuKzNi8AFn048Thd63OdD86M6
-84zE8yQm0VqXdbbgvub2pKVnS76icBGdeTHHXTKspUmr4NYo/furFLKiMdQzFjHJNcdAnMhltBJK
-0/IKX3DVFqvPJ2dLE7bDBkH0l/PJ29074+F0CsGYOxsb7U3myTUncYfXqnLLfa6sJybX4g+hmcjO
-kMRBfA1JellfRRKJcyRpxdS4rIl6FdmQCWjo/o9Qz7yKffoP4JHjOvABcRn4CZIT2RH4jnxmfpVG
-qgLaAvQBNfuO6X0/Ux02nb4FKx3vgP+XnkX0QW9pLy/NsXgdN24dD3LxO2Nwil7Zlc1dqtP3d7/h
-kzp1/+7hGBuY4pk0XD/0Ao/oTe/XGrfyM773aB7iUhgkpy+dwAMalxMP0DrBcsVw/6p25+/hobP9
-GBknrWExDhLJ1bwt1NcCNblaFbMKCyvmX0PeRaQ=
-""")
-
-##file distutils.cfg
-DISTUTILS_CFG = convert("""
-eJxNj00KwkAMhfc9xYNuxe4Ft57AjYiUtDO1wXSmNJnK3N5pdSEEAu8nH6lxHVlRhtDHMPATA4uH
-xJ4EFmGbvfJiicSHFRzUSISMY6hq3GLCRLnIvSTnEefN0FIjw5tF0Hkk9Q5dRunBsVoyFi24aaLg
-9FDOlL0FPGluf4QjcInLlxd6f6rqkgPu/5nHLg0cXCscXoozRrP51DRT3j9QNl99AP53T2Q=
-""")
-
-##file activate_this.py
-ACTIVATE_THIS = convert("""
-eJyNU01v2zAMvetXEB4K21jmDOstQA4dMGCHbeihlyEIDMWmG62yJEiKE//7kXKdpN2KzYBt8euR
-fKSyLPs8wiEo8wh4wqZTGou4V6Hm0wJa1cSiTkJdr8+GsoTRHuCotBayiWqQEYGtMCgfD1KjGYBe
-5a3p0cRKiAe2NtLADikftnDco0ko/SFEVgEZ8aRC5GLux7i3BpSJ6J1H+i7A2CjiHq9z7JRZuuQq
-siwTIvpxJYCeuWaBpwZdhB+yxy/eWz+ZvVSU8C4E9FFZkyxFsvCT/ZzL8gcz9aXVE14Yyp2M+2W0
-y7n5mp0qN+avKXvbsyyzUqjeWR8hjGE+2iCE1W1tQ82hsCZN9UzlJr+/e/iab8WfqsmPI6pWeUPd
-FrMsd4H/55poeO9n54COhUs+sZNEzNtg/wanpjpuqHJaxs76HtZryI/K3H7KJ/KDIhqcbJ7kI4ar
-XL+sMgXnX0D+Te2Iy5xdP8yueSlQB/x/ED2BTAtyE3K4SYUN6AMNfbO63f4lBW3bUJPbTL+mjSxS
-PyRfJkZRgj+VbFv+EzHFi5pKwUEepa4JslMnwkowSRCXI+m5XvEOvtuBrxHdhLalG0JofYBok6qj
-YdN2dEngUlbC4PG60M1WEN0piu7Nq7on0mgyyUw3iV1etLo6r/81biWdQ9MWHFaePWZYaq+nmp+t
-s3az+sj7eA0jfgPfeoN1
-""")
-
-MH_MAGIC = 0xfeedface
-MH_CIGAM = 0xcefaedfe
-MH_MAGIC_64 = 0xfeedfacf
-MH_CIGAM_64 = 0xcffaedfe
-FAT_MAGIC = 0xcafebabe
-BIG_ENDIAN = '>'
-LITTLE_ENDIAN = '<'
-LC_LOAD_DYLIB = 0xc
-maxint = majver == 3 and getattr(sys, 'maxsize') or getattr(sys, 'maxint')
-
-
-class fileview(object):
-    """
-    A proxy for file-like objects that exposes a given view of a file.
-    Modified from macholib.
-    """
-
-    def __init__(self, fileobj, start=0, size=maxint):
-        if isinstance(fileobj, fileview):
-            self._fileobj = fileobj._fileobj
-        else:
-            self._fileobj = fileobj
-        self._start = start
-        self._end = start + size
-        self._pos = 0
-
-    def __repr__(self):
-        return '<fileview [%d, %d] %r>' % (
-            self._start, self._end, self._fileobj)
-
-    def tell(self):
-        return self._pos
-
-    def _checkwindow(self, seekto, op):
-        if not (self._start <= seekto <= self._end):
-            raise IOError("%s to offset %d is outside window [%d, %d]" % (
-                op, seekto, self._start, self._end))
-
-    def seek(self, offset, whence=0):
-        seekto = offset
-        if whence == os.SEEK_SET:
-            seekto += self._start
-        elif whence == os.SEEK_CUR:
-            seekto += self._start + self._pos
-        elif whence == os.SEEK_END:
-            seekto += self._end
-        else:
-            raise IOError("Invalid whence argument to seek: %r" % (whence,))
-        self._checkwindow(seekto, 'seek')
-        self._fileobj.seek(seekto)
-        self._pos = seekto - self._start
-
-    def write(self, bytes):
-        here = self._start + self._pos
-        self._checkwindow(here, 'write')
-        self._checkwindow(here + len(bytes), 'write')
-        self._fileobj.seek(here, os.SEEK_SET)
-        self._fileobj.write(bytes)
-        self._pos += len(bytes)
-
-    def read(self, size=maxint):
-        assert size >= 0
-        here = self._start + self._pos
-        self._checkwindow(here, 'read')
-        size = min(size, self._end - here)
-        self._fileobj.seek(here, os.SEEK_SET)
-        bytes = self._fileobj.read(size)
-        self._pos += len(bytes)
-        return bytes
-
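-# Hedged sketch of how fileview is used below (the path and offsets are
-# hypothetical):
-#
-#     f = fileview(open('some_fat_binary', 'r+b'), start=4096, size=8192)
-#     f.seek(0)           # lands at absolute offset 4096 in the real file
-#     magic = f.read(4)   # reads are confined to the [4096, 12288) window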
-
-def read_data(file, endian, num=1):
-    """
-    Read a given number of 32-bit unsigned integers from the given file
-    with the given endianness.
-    """
-    res = struct.unpack(endian + 'L' * num, file.read(num * 4))
-    if len(res) == 1:
-        return res[0]
-    return res
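-
-# For example: read_data(f, BIG_ENDIAN) yields a single int, while
-# read_data(f, LITTLE_ENDIAN, 2) yields a tuple of two ints.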
-
-
-def mach_o_change(path, what, value):
-    """
-    Replace a given name (what) in any LC_LOAD_DYLIB command found in
-    the given binary with a new name (value), provided the new name is
-    no longer than the original (the assert below enforces this).
-    """
-
-    def do_macho(file, bits, endian):
-        # Read Mach-O header (the magic number is assumed read by the caller)
-        cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = read_data(file, endian, 6)
-        # 64-bits header has one more field.
-        if bits == 64:
-            read_data(file, endian)
-        # The header is followed by ncmds commands
-        for n in range(ncmds):
-            where = file.tell()
-            # Read command header
-            cmd, cmdsize = read_data(file, endian, 2)
-            if cmd == LC_LOAD_DYLIB:
-                # The first data field in LC_LOAD_DYLIB commands is the
-                # offset of the name, starting from the beginning of the
-                # command.
-                name_offset = read_data(file, endian)
-                file.seek(where + name_offset, os.SEEK_SET)
-                # Read the NUL terminated string
-                load = file.read(cmdsize - name_offset).decode()
-                load = load[:load.index('\0')]
-                # If the string is what is being replaced, overwrite it.
-                if load == what:
-                    file.seek(where + name_offset, os.SEEK_SET)
-                    file.write(value.encode() + '\0'.encode())
-            # Seek to the next command
-            file.seek(where + cmdsize, os.SEEK_SET)
-
-    def do_file(file, offset=0, size=maxint):
-        file = fileview(file, offset, size)
-        # Read magic number
-        magic = read_data(file, BIG_ENDIAN)
-        if magic == FAT_MAGIC:
-            # Fat binaries contain nfat_arch Mach-O binaries
-            nfat_arch = read_data(file, BIG_ENDIAN)
-            for n in range(nfat_arch):
-                # Read arch header
-                cputype, cpusubtype, offset, size, align = read_data(file, BIG_ENDIAN, 5)
-                do_file(file, offset, size)
-        elif magic == MH_MAGIC:
-            do_macho(file, 32, BIG_ENDIAN)
-        elif magic == MH_CIGAM:
-            do_macho(file, 32, LITTLE_ENDIAN)
-        elif magic == MH_MAGIC_64:
-            do_macho(file, 64, BIG_ENDIAN)
-        elif magic == MH_CIGAM_64:
-            do_macho(file, 64, LITTLE_ENDIAN)
-
-    assert(len(what) >= len(value))
-    do_file(open(path, 'r+b'))
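-
-# Hedged usage sketch, mirroring how virtualenv relinks the OS X framework
-# Python (the concrete paths are illustrative):
-#
-#     mach_o_change(py_executable,
-#                   '/Library/Frameworks/Python.framework/Versions/2.7/Python',
-#                   '@executable_path/../.Python')
-#
-# The new name must be no longer than the old one, since it is written in
-# place over the existing load command.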
-
-
-if __name__ == '__main__':
-    main()
-
-## TODO:
-## Copy python.exe.manifest
-## Monkeypatch distutils.sysconfig
diff --git a/llvm/projects/hpvm-tensor-rt/table_fixer.py b/llvm/projects/hpvm-tensor-rt/table_fixer.py
deleted file mode 100644
index 3095d15aba0757aca3b74705ba57b5e189b5cecb..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/table_fixer.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Fixes table formatting:
-#  - strips dataset prefixes (e.g. cifar10) so every column name starts with fp16 or fp32
-#  - combines two tables into one, row by row
-
-def fix_columns(table_name, new_filename):
-    table_file = open(table_name, "r")
-
-    new_table_file = []
-
-    for line in table_file:
-        line = line.strip()
-        if line.startswith("**"):
-            col_names = line.split()
-            new_col_names = []
-
-            for col_name in col_names:
-                if col_name.find("fp16") != -1: 
-                    new_col_names.append(col_name[col_name.find("fp16") : ])
-                elif col_name.find("fp32") != -1:
-                    new_col_names.append(col_name[col_name.find("fp32") : ])
-                else:
-                    new_col_names.append(col_name)
-            new_table_file.append(' '.join(new_col_names))
-        else:
-            new_table_file.append(line)
-    table_file.close()
-    table_file_new = open(new_filename, "w")
-    table_file_new.write('\n'.join(new_table_file))
-    table_file_new.close()
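-
-# Hedged example of fix_columns on a header row (column names hypothetical):
-# header rows start with "**", and each column keeps only the text from
-# "fp16"/"fp32" onward, e.g.
-#
-#   **  cifar10_fp16_time  cifar10_fp32_time  ->  **  fp16_time  fp32_time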
-
-def combine_tables(table1, table2, new_filename):
-    table1_file = open(table1, "r")
-    table2_file = open(table2, "r")
-
-    table1_data = table1_file.read().strip().split('\n')
-    table2_data = table2_file.read().strip().split('\n')
-    new_contents = []
-
-    table2_ind = 0
-    for table1_line in table1_data:
-        table2_line = table2_data[table2_ind]
-
-        if table1_line.startswith("**"):
-            assert table2_line.startswith("**")
-            table2_lst = table2_line.strip().split()
-            table2_cols = ' '.join(table2_lst[3 : ])
-            new_contents.append(table1_line + ' ' + table2_cols)
-        else:
-            table2_lst = table2_line.strip().split()
-            table2_cols = ' '.join(table2_lst[1 : ])
-            new_contents.append(table1_line + ' ' + table2_cols)
-        table2_ind += 1
-
-    table1_file.close()
-    table2_file.close()
-
-    new_file = open(new_filename, "w")
-    new_file.write('\n'.join(new_contents))
-    new_file.close()
-
-import sys
-
-if __name__ == "__main__":
-    num_args = len(sys.argv)
-
-    if num_args != 4 and num_args != 5:
-        print("python table_fixer.py <fix> <filename> OR <combine> <table1> <table2> <new name>")
-        exit(1)
-    elif sys.argv[1] == "fix":
-        fix_columns(sys.argv[2], sys.argv[3])
-    elif sys.argv[1] == "combine":
-        combine_tables(sys.argv[2], sys.argv[3], sys.argv[4])
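-
-# Usage (hypothetical file names):
-#   python table_fixer.py fix table.txt table_fixed.txt
-#   python table_fixer.py combine t1.txt t2.txt combined.txt
-# "fix" turns a header column like "cifar10_fp16_conv1" into "fp16_conv1".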
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/#error.h# b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/#error.h#
deleted file mode 100644
index d474152fd80ecc90e07092795e513e2d97da0129..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/#error.h#
+++ /dev/null
@@ -1,627 +0,0 @@
-
-#ifndef ERROR_HEADER
-#define ERROR_HEADER
-
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-#include <cmath>
-#include <ctime>
-#include <cfloat>
-#include <algorithm>
-#include <sstream>
-#include <vector>
-#include <iostream>
-#include <random>
-#include <string>
-#include <time.h>
-
-#include <curand.h>
-#include <curand_kernel.h>
-#include <math.h>
-#include <assert.h>
-
-
-#include "../include/debug.h"
-#include "tensor.h"
-#include "profiling.h"
-#include "tensor_utils.cu"
-#include "global_data.h"
-
-
-
-
-void readOpenTunerFlags(char* file_name){
-
-  total_ops = 0;
-  op_counter = 0;
-  op_accuracies.clear();
-
-  
-  FILE* fp = fopen(file_name, "r");
-  if(fp == NULL){
-    ERROR("File 'opentuner_flags' not found \n");
-  }
-    
-  int op_acc;
-  while(fscanf(fp, "%d", &op_acc) == 1){
-    op_accuracies.push_back(op_acc);
-    //printf("op_accuracies = %d, total_ops = %d \n", op_accuracies[total_ops], total_ops);
-    total_ops++;
-  }
-  
-  fclose(fp);
-}
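-
-// The flags file is assumed to be a whitespace-separated list of integer
-// knob values, one per tensor op, e.g.:
-//   11 12 23 31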
-
-
-
-
-
-/*__device__ inline void atomicAdd(float* address, float value)
-
-{
-
-  float old = value;
-  float new_old;
-
-  do{
-    new_old = atomicExch(address, 0.0f);
-    new_old += old;
-  }
-
-  while ((old = atomicExch(address, new_old))!=0.0f);
-
-};
-*/
-
-
-
-
-
-Norm_t* calculateNorms(Tensor* x, Tensor* x_orig){
-
-  deviceToHostCopy(x);
-  deviceToHostCopy(x_orig);
-
-  // NOTE: Move floats to doubles - overflow is quite possible
-  float l1_norm = 0.0;
-  float l2_norm = 0.0;
-  float inf_norm = -1.0;
-  double total = 0.0;
-
-  float* arr1 = (float*) x->host_data;
-  float* arr2 = (float*) x_orig->host_data;
-  
-  for(unsigned int i = 0; i < x->num_elems; i++){
-
-    total = total + arr2[i];
-    
-    float diff = fabs(arr1[i] - arr2[i]);
-    l1_norm += diff;
-    l2_norm += (arr1[i] - arr2[i]) *  (arr1[i] - arr2[i]);
-
-    if(inf_norm < diff)
-      inf_norm = diff;
-  }
-
-  l1_norm = l1_norm / (x->num_elems * 1.0);
-  l2_norm = l2_norm / (x->num_elems * 1.0);
-
-  double distribution_mean = total / (x->num_elems * 1.0);
-  l1_norm = l1_norm / distribution_mean;
-  l2_norm = l2_norm / distribution_mean;
-
-    
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  norms->l1_norm = l1_norm;
-  norms->l2_norm = l2_norm;
-  norms->inf_norm = inf_norm;  
-  
-  INFO("l1_norm = %f \n", l1_norm);
-  INFO("l2_norm = %f \n", l2_norm);
-  INFO("inf_norm = %f \n", inf_norm);
-
-  return norms;
-}
-
-
-
-Norm_t* calculateNorms2(Tensor* x, Tensor* x_orig){
-
-  deviceToHostCopy(x);
-  deviceToHostCopy(x_orig);
-
-  // NOTE: Move all floats to doubles - overflow is quite possible
-  double l0_norm_A = 0.0;
-  double l0_norm_B = 0.0;
-
-  double l1_norm_A = 0.0;
-  double l1_norm_B = 0.0;
-  
-  double l2_norm_A = 0.0;
-  double l2_norm_B = 0.0;
-  float inf_norm = -1.0;
-  float orig_inf_norm = -1.0;
-  double total_diff = 0.0;
-  double total_diff_squared = 0.0;
- 
-  float* arr1 = (float*) x->host_data;
-  float* arr2 = (float*) x_orig->host_data;
-  
-  for(unsigned int i = 0; i < x->num_elems; i++){
-
-    if(arr2[i] != 0.0)
-      l0_norm_A = l0_norm_A + 1.0;
-    if(arr1[i] != 0.0)
-      l0_norm_B = l0_norm_B + 1.0;
-        
-    l1_norm_A = l1_norm_A + fabs(arr2[i]);
-    l1_norm_B = l1_norm_B + fabs(arr1[i]);
-
-    l2_norm_A = l2_norm_A + (arr2[i] * arr2[i]);
-    l2_norm_B = l2_norm_B + (arr1[i] * arr1[i]);
-      
-    float diff = fabs(arr1[i] - arr2[i]);
-    total_diff = total_diff + diff;
-    float diff_squared = diff * diff;
-    total_diff_squared = total_diff_squared + diff_squared; 
-
-
-    if(orig_inf_norm < diff){
-      orig_inf_norm = diff;
-    }
-    
-    // Relative difference value
-    float normalized_diff = diff / arr2[i];   
-    if(inf_norm < normalized_diff){
-      inf_norm = normalized_diff;
-    }    
-  }
-
-  // Relative L1 and Mean L1 norms of the difference Matrix
-  float mean_l1 = ( total_diff ) / x->num_elems;
-  float relative_l1 = ( total_diff ) / l1_norm_A;
-
-  // Computing Relative L2 norm - i.e., Euclidean distance
-  double norm_root_A = sqrt(l2_norm_A);
-  double diff_root = sqrt(total_diff_squared);
-  float mean_l2 = diff_root / x->num_elems;
-  float relative_l2 = diff_root / norm_root_A;
-
-  // Packing computed norms in Norm_t struct
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  // Mean metrics - not normalized for the distribution - suitable for precision tuning hardware
-  norms->mean_l1 = mean_l1;
-  norms->mean_l2 = mean_l2;
-  norms->orig_inf_norm = orig_inf_norm;
-
-  // Relative metrics (relative to distribution) - suitable for PROMISE
-  norms->l1_norm = relative_l1;
-  norms->l2_norm = relative_l2;
-  norms->inf_norm = inf_norm;  
-  
-  INFO("l1_norm = %f \n", relative_l1);
-  INFO("l2_norm = %f \n", relative_l2);
-  INFO("inf_norm = %f \n", inf_norm);
-
-  return norms;
-}
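-
-// With A = x_orig and B = x over n elements, the metrics above are:
-//   mean_l1     = (1/n) * sum_i |B_i - A_i|
-//   relative_l1 = sum_i |B_i - A_i| / sum_i |A_i|
-//   relative_l2 = sqrt(sum_i (B_i - A_i)^2) / sqrt(sum_i A_i^2)
-// i.e., the relative metrics normalize the error by the magnitude of the
-// original distribution.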
-
-
-
-
-
-__global__ void normComputeKernel(float* A, float * B, double* l1_A, double* l2_A,
-				  double* l1_diff, double* l2_diff, unsigned int n){
-
-  int i = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(i < n){
-    
-    double diff = fabsf(A[i] - B[i]);
-    double diff_squared = diff * diff;   
-
-    atomicAdd( l1_A,  fabsf(A[i]) );
-    atomicAdd( l2_A, (A[i] * A[i]) );
-
-    atomicAdd( l1_diff, diff);
-    atomicAdd( l2_diff, diff_squared);
-  }
-}
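-
-// NOTE: atomicAdd on double operands requires compute capability >= 6.0,
-// and the accumulators must be zeroed on the device before the launch.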
-
-
-
-// Compute Norms on the GPU
-Norm_t* calculateNormsGPU(Tensor* x, Tensor* x_orig){
-
-  hostToDeviceCopy(x);
-  hostToDeviceCopy(x_orig);
-
-  // FIXIT: Move all floats to doubles - overflow is possible
-  
-  double l1_norm_A;
-  double l2_norm_A;
-
-  double l1_diff;
-  double l2_diff;
-
-  // Device pointers
-  double *l1_norm_A_d;
-  double *l2_norm_A_d;
-  double *l1_diff_d;
-  double *l2_diff_d;
-  
-  cudaMalloc( (void**) &l1_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l2_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l1_diff_d, sizeof(double));
-  cudaMalloc( (void**) &l2_diff_d, sizeof(double));
- 
-    
-  float* arr1 = (float*) x->gpu_data;
-  float* arr2 = (float*) x_orig->gpu_data;
-
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  normComputeKernel<<<gridSize, blockSize>>>(arr1, arr2, l1_norm_A_d, l2_norm_A_d, l1_diff_d, l2_diff_d, x->num_elems);
-
-  cudaMemcpy(&l1_norm_A, l1_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_norm_A, l2_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l1_diff, l1_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_diff, l2_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-  
-
-  // Relative L1 and Mean L1 norms of the difference Matrix
-  float mean_l1 = l1_diff / x->num_elems;
-  float relative_l1 = l1_diff / l1_norm_A;
-
-  // Computing Relative L2 norm - i.e., Euclidean distance
-  double norm_root_A = sqrt(l2_norm_A);
-  double diff_root = sqrt(l2_diff);
-  float mean_l2 = diff_root / x->num_elems;
-  float relative_l2 = diff_root / norm_root_A;
-
-  // Packing computed norms in Norm_t struct
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  // Mean metrics - not normalized for the distribution - suitable for precision tuning hardware
-  norms->mean_l1 = mean_l1;
-  norms->mean_l2 = mean_l2;
-  norms->orig_inf_norm = 0.0;
-
-  // Relative metrics (relative to distribution) - suitable for PROMISE
-  norms->l1_norm = relative_l1;
-  norms->l2_norm = relative_l2;
-  norms->inf_norm = 0.0;  
-  
-  INFO("l1_norm = %f \n", relative_l1);
-  INFO("l2_norm = %f \n", relative_l2);
-
-  return norms;
-}
-
-
-
-
-__global__ void vecConstMul(float* A, float mul_factor, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = A[id] * mul_factor; 
-}
-
-
-__global__ void vecRound(float* A, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = roundf(A[id]); 
-}
-
-
-__global__ void vecConstDiv(float* A, float div_factor, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = A[id] / div_factor; 
-}
-
-
-
-__global__ void vecMul(float* A, float* B, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    B[id] = A[id] * B[id]; 
-}
-
-
-/****  ERROR injection routines  ******/
-
-void initRandValues(Tensor* bias, int error_scale){
-
-  float scaling_values[20];
-
-  // FIXIT: Error knob 0 should be zero error
-  scaling_values[0] = 0.016;
-  scaling_values[1] = 0.018;
-  scaling_values[2] = 0.022;
-  scaling_values[3] = 0.026;
-  scaling_values[4] = 0.030;
-  scaling_values[5] = 0.035;  
-  scaling_values[6] = 0.04;
-  scaling_values[7] = 0.06;
-  scaling_values[8] = 0.08;
-  scaling_values[9] = 0.1;
-  //scaling_values[8] = 0.15;
-  //scaling_values[9] = 0.2;
-  scaling_values[10] = 0.25;
-  scaling_values[11] = 0.3;
-  scaling_values[12] = 0.35;
-  scaling_values[13] = 0.4;
-  scaling_values[14] = 0.45;
-  // Values below are currently unused by Opentuner
-  scaling_values[15] = 0.5;
-  scaling_values[16] = 0.55;
-  scaling_values[17] = 0.6;
-  scaling_values[18] = 0.65;
-  scaling_values[19] = 0.7;
-
-  curandGenerator_t gen;
-
-  struct timespec ts;
-  if(timespec_get(&ts, TIME_UTC) == 0){
-    printf("crashed \n");
-    abort();
-  }
-
-  curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
-
-  curandSetPseudoRandomGeneratorSeed(gen, ts.tv_nsec^ts.tv_sec);
-    
-  curandGenerateNormal(gen, (float*) bias->gpu_data, bias->num_elems, 0.0, 1.0 * scaling_values[error_scale]);
-
-  
-  /*
-  std::random_device rd;
-  std::mt19937 mt(rd());
-  std::normal_distribution<float> distribution(0.0, 1.0);
-  
-  float* data_arr = (float*) bias->host_data;
-  for(int i = 0; i < bias->num_elems; i++){
-    float rand_num = distribution(mt);
-    data_arr[i] = scaling_values[error_scale] * rand_num;   
-  }
-  */
-  
-}
-
-
-
-void* addBitError(void* x_ptr, int error_scale){
-
-  if(error_scale > 5 || error_scale < 0){
-    ERROR("Error Scale out of bounds \n");
-  }
-      
-  INFO("*** TensorBitError \n");  
-  profileEvent("tensorBitError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes; 
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  // Quadratic Error
-  float freq_factors[6];
-  freq_factors[0] = 0.1;
-  freq_factors[1] = 0.2;
-  freq_factors[2] = 0.4;
-  freq_factors[3] = 0.6;
-  freq_factors[4] = 0.8;
-  freq_factors[5] = 1.0;
-
-  float error_freq = freq_factors[error_scale];
-  
-  deviceToHostCopy(x);
-
-  unsigned char* data_arr = reinterpret_cast<unsigned char*>(x->host_data);
-  // FIXIT: Need to be careful about floating point datatype assumptions
-  int size_of_elem = 4; 
-
-  long int total_bytes = x->size_in_bytes;
-  long int error_iterations = total_bytes * 0.01 * error_freq;
-  INFO("total_bytes = %lu, error_iterations = %lu \n", total_bytes, error_iterations);
-
-  srand(time(NULL));
-  
-  for(int i = 0; i < error_iterations; i++){
-    // FIXIT: The rand() is only specific to int - need long 
-    long int index = rand() % total_bytes;
-    int N = 5; // The operation below flips the Nth bit 
-    unsigned char fil = 1UL << N;
-    unsigned char val = data_arr[index];
-    char flipped = val^fil;
-    data_arr[index] = flipped;
-  }
-  
-
-  Norm_t* norms = calculateNorms2(x, x_original);
-  
-  profileEvent("tensorBitError_end", true);
-  
-  return (void*) norms;
-}
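-
-// Example: with error_scale = 2, error_freq = 0.4, so bit 5 is flipped in
-// roughly total_bytes * 0.01 * 0.4 = 0.4% of the tensor's bytes.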
-
-
-void randomCeilAndFloor(float* x, size_t num_elems){
-
-  INFO("randomCeilAndFloor\n");
-  
-  std::random_device rd;
-  std::mt19937 mt(rd());
-  std::normal_distribution<float> distribution(0.0, 1.0);
-
-  for(size_t i = 0; i < num_elems; i++){
-    float rand_num = distribution(mt);
-    int val = abs(((int) rand_num) % 2);
-    if(val == 0)
-      x[i] = floor(x[i]);
-    else if(val == 1)
-      x[i] = ceil(x[i]);
-  }
-
-}
-
-// Routine for Adding RoundOff Errors
-void* addRoundError(void* x_ptr, int error_scale){
-
-  if(error_scale > 11 || error_scale < 0){
-    ERROR("Error Scale out of bounds \n");
-  }
-      
-  INFO("*** TensorRoundError \n");  
-  profileEvent("tensorRoundError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes; 
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  float round_factors[12];
-  round_factors[0] = 1000000; // FIXIT: This should be zero error
-  round_factors[1] = 100;
-  round_factors[2] = 10;
-  round_factors[3] = 7; // Beyond this point, the error function is linear
-  round_factors[4] = 3;
-  round_factors[5] = 1;
-  round_factors[6] = 0.7;
-  round_factors[7] = 0.3;
-  round_factors[8] = 0.1;
-  round_factors[9] = 0.07;
-  round_factors[10] = 0.03;
-  round_factors[11] = 0.01;
-  
-  // THINK: Considering using error magnitudes in this scenario
-  
-
-  float round_factor = round_factors[error_scale];
-  INFO("round_factor = %f \n", round_factor);
-  
-  hostToDeviceCopy(x);
-
-  int blockSize = 128;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  // NOTE: Check if a large gridSize will work with really large tensors
-  vecConstMul<<<gridSize, blockSize>>>((float*) x->gpu_data, round_factor, x->num_elems);
-  //vecRound<<<gridSize, blockSize>>>((float*) x->gpu_data, x->num_elems);
-  
-  deviceToHostCopy(x);
-  randomCeilAndFloor((float*) x->host_data, x->num_elems);
-  hostToDeviceCopy(x);
-  
-  vecConstDiv<<<gridSize, blockSize>>>((float*) x->gpu_data, round_factor, x->num_elems);
-  
-  Norm_t* norms = calculateNorms2(x, x_original);
-  
-  profileEvent("tensorRoundError_end", true);
-  
-  return (void*) norms;
-}
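-
-// Example: with error_scale = 2, round_factor = 10, so each element is
-// scaled by 10, stochastically rounded up or down to an integer, and scaled
-// back - i.e., values are randomly quantized to multiples of 0.1.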
-
-
-
-
-// Routine for Adding Gaussian Error
-void* addGaussianError(void* x_ptr, int error_scale){
-
-  if(error_scale > 11 || error_scale < 0){
-    ERROR("Error Scale out of bounds \n");
-  }
-      
-  INFO("*** TensorAddError \n");  
-  profileEvent("tensorAddError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes;
-  Tensor* bias = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					  dim_sizes[0], dim_sizes[1],
-					  dim_sizes[2], dim_sizes[3]);
-  
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  // NOTE: Error scale is used to generate the bias matrix
-  initRandValues(bias, error_scale);  
-
-  hostToDeviceCopy(x);
-  //hostToDeviceCopy(bias);
-
- 
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  // NOTE: Check if a large gridSize will work with really large tensors
-  vecMul<<<gridSize, blockSize>>>((float*) x->gpu_data, (float*) bias->gpu_data, x->num_elems);
-  
-  float alpha = 1.0f, beta = 0.0f;
-    
-  // FIXIT: routine fails for 3D tensors
-  checkCUDNN(cudnnAddTensor(cudnnHandle, &alpha, bias->tensor_desc,
-			    bias->gpu_data, &alpha, x->tensor_desc, x->gpu_data));
-
-
-  //Norm_t* norms = calculateNorms2(x, x_original);
-  Norm_t* norms = calculateNormsGPU(x, x_original);
-  
-  
-  profileEvent("tensorAddError_end", true);
-  
-  return (void*) norms;
-}
-
-
-
-void* tensorAddError(void* x_ptr, int error_scale){
-
-  void * new_x = addGaussianError(x_ptr, error_scale);
-  //void * new_x = addRoundError(x_ptr, error_scale);
-  //void * new_x = addBitError(x_ptr, error_scale);
-  return new_x;
-}
-
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/.#error.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/.#error.h
deleted file mode 120000
index a9c72af5b6737a57be1db44cd3231c6dda0857f0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/.#error.h
+++ /dev/null
@@ -1 +0,0 @@
-hsharif3@tyler.cs.illinois.edu.21294:1541049775
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_api.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_api.h
deleted file mode 100644
index 811fa4090ff2f4399b31bfb0ec228801eee844e0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_api.h
+++ /dev/null
@@ -1,70 +0,0 @@
-
-
-#include "tensor.h"
-
-
-extern "C"{
-
-  // NOTE: API for tensorGroupConvolution
-  // API for Running Tensor Convolution with CUTLASS
-  void* tensorConvCutlass(void* input, void* filter,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride,
-			  int conv_mode, int conv_groups);
-
-  void* tensorHalfConvCutlass(void* input, void* filter,
-			      int vertical_pad, int horizontal_pad,
-			      int vertical_stride, int horizontal_stride,
-			      int conv_mode, int conv_groups);
-
-
-  // Perforated Tensor Conv with 'perforation_rate' parameter
-  void* tensorConvPerf(void* input, void* filter,
-		       int vertical_pad, int horizontal_pad,
-		       int vertical_stride, int horizontal_stride,
-		       int conv_mode, int conv_groups, int row, int col);
-
-  void* tensorConvolutionKernelSamp(void* input, void* filter_ptr,
-				    int vertical_pad, int horizontal_pad, int vertical_stride,
-				    int horizontal_stride, int conv_mode, int conv_groups, int skip_every);
-
-  void* tensorConvPerfCuda(void* input, void* filter,
-			   int vertical_pad, int horizontal_pad,
-			   int vertical_stride, int horizontal_stride,
-			   int conv_mode, int conv_groups,
-			   int row, int col, int start);
-
-  void* tensorConvPerfCudaHalf(void* input_ptr, void* filter_ptr,
-			       int vertical_pad, int horizontal_pad,
-			       int vertical_stride, int horizontal_stride,
-			       int conv_mode, int conv_groups,
-			       int row, int col, int start);
-  
-  void sampleFilter(Tensor* filter, int skip_rate, int skip_offset);
-
-  void* tensorConvSampSim(void* input_ptr, void* filter_ptr,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride,
-			  int conv_mode, int conv_groups,
-			  int skip_rate, int skip_offset);
-
-  void* tensorConvInputHalf(void* input_ptr, void* filter_ptr,
-              int vertical_pad, int horizontal_pad, int vertical_stride,
-              int horizontal_stride, int conv_mode, int conv_groups,
-              int skip_every, int skip_offset);
-
-  void* tensorConvApproxHalf(void* input_ptr, void* filter_ptr,
-			     int vertical_pad, int horizontal_pad,
-			     int vertical_stride, int horizontal_stride,
-			     int conv_mode, int conv_groups,
-			     int row, int col,
-			     int skip_every, int skip_offset);
-
-  void* tensorConvApprox(void* input_ptr, void* filter_ptr,
-			 int vertical_pad, int horizontal_pad,
-			 int vertical_stride, int horizontal_stride,
-			 int conv_mode, int conv_groups,
-			 int row, int col,
-			 int skip_every, int skip_offset);
-
-}
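-
-// Parameter convention (as used by the runtime implementations): 'row' and
-// 'col' select perforation along output rows/columns, while 'skip_every'
-// and 'skip_offset' control filter/input sampling.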
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_simulation.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_simulation.h
deleted file mode 100644
index 384464f44ddc32524db5de07a59b4e38d1422a8e..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_simulation.h
+++ /dev/null
@@ -1,1399 +0,0 @@
-
-
-#ifndef SIM_HEADER
-#define SIM_HEADER
-
-
-
-#include "tensor_runtime.h"
-#include "tensor_utils.cu"
-#include "debug.h"
-#include "profiling.h"
-#include "fp16_conversion.h"
-#include "global_data.h"
-#include "error.h"
-#include "tensor.h"
-#include "op_overheads.h"
-#include "half_precision_api.h"
-#include "approx_techniques2.h"
-#include <unordered_map>
-
-
-
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void postInterpolateRow(int N, int n, int c, int h, int w, float* data, int int_row){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if((row % int_row == 1) && (row != 0) && (row != h-1))
-      data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(data[n * (c * h * w) + ch * (h * w) + (row - 1) * (w) + col] +
-	 data[n * (c * h * w) + ch * (h * w) + (row + 1)  * (w) + col])/2;
-
-  }
-}
-
-
-
-__global__
-void postInterpolateCol(int N, int n, int c, int h, int w, float* data, int int_col){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if((col % int_col == 1) && (col != 0) && (col != w-1))
-      data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(data[n * (c * h * w) + ch * (h * w) + row * (w) + (col-1) ] +
-	 data[n * (c * h * w) + ch * (h * w) + row * (w) + (col+1) ])/2;
-
-  }
-}
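-
-// Example: with int_row = 2, every interior output row with an odd row index
-// is replaced by the average of the rows directly above and below it;
-// postInterpolateCol does the same along columns with int_col.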
-
-
-
-
-// A 'Simulation' of perforated tensor convolution
-void* tensorConvPerfSim(void* input_ptr, void* filter_ptr,
-			int vertical_pad, int horizontal_pad,
-			int vertical_stride, int horizontal_stride,
-			int conv_mode, int conv_groups, int row, int col){
-  
-
-  INFO("*** TensorConvolution \n");
-  profileEvent("tensorConv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  INFO("vertical_stride = %lu, horizontal_stride = %lu \n", vertical_stride, horizontal_stride);
-
-  checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-  //FIXME: Current hack to preserve backward compatibility
-  if(conv_groups == 0){
-    conv_groups = 1;
-  }
-
-  // NOTE: Adding support for grouped convolution
-  checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  int new_v = vertical_stride;
-  int new_h = horizontal_stride;
-  cudnnDataType_t computeType = CUDNN_DATA_FLOAT;
-  
-  checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					     vertical_pad, horizontal_pad, // conv padding
-					     new_v, new_h, // conv strides
-					     1, 1, // upscaling values
-					     mode , // mode is configurable
-					     computeType)); // defines compute precision
-
-  int n, c, h, w; // output dimensions
-  // Find dimension of convolution output
-  checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   &n, &c, &h, &w));
-
-
-  DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-  Tensor* output;
-  if(input->data_format == CUDNN_TENSOR_NCHW)
-    output = (Tensor*) create4DTensor((cudnnDataType_t) input->data_type,
-				      CUDNN_TENSOR_NCHW, n, c, h, w);
-  else if(input->data_format == CUDNN_TENSOR_NHWC){
-    DEBUG("* NHWC Format \n");
-    output = (Tensor*) create4DTensor((cudnnDataType_t) input->data_type,
-				      CUDNN_TENSOR_NHWC, n, h, w, c);
-  }
-  else
-    ERROR("Unsupported Tensor Type");
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, C = %d, H = %d, W = %d \n",
-	output->data_type, output->data_format, output->dims.dim_sizes[0],
-	output->dims.dim_sizes[1],
-	output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-  if(convDesc == NULL || input->tensor_desc == NULL ||
-     filter->filter_desc == NULL || output->tensor_desc == NULL)
-    ERROR("NULL descriptor! \n");
-
-
-
-  // NOTE-FIXIT: function failing for NHWC formats - perhaps some CUDNN support is lacking
-  checkCUDNN(cudnnGetConvolutionForwardAlgorithm(cudnnHandle,
-						 input->tensor_desc,
-						 filter->filter_desc,
-						 convDesc,
-						 output->tensor_desc,
-						 CUDNN_CONVOLUTION_FWD_PREFER_FASTEST,
-						 //CUDNN_CONVOLUTION_FWD_NO_WORKSPACE,
-						 0,
-						 &convAlgo));
-
-
-  DEBUG("ConvAlgo = %d, FFT = %d, GEMM = %d, WINOGRAD = %d \n", convAlgo,
-	CUDNN_CONVOLUTION_FWD_ALGO_FFT, CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
-	CUDNN_CONVOLUTION_FWD_ALGO_WINOGRAD);
-
-
-  // FIXIT: Algo shouldn't be hardcoded
-  convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-
-  size_t workspace_size;
-  checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     convDesc,
-						     output->tensor_desc,
-						     convAlgo,
-						     &workspace_size));
-
-  // Allocating memory for the convolution workspace
-  void* workspace;
-  checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-  DEBUG("workspace size = %d \n", workspace_size);
-
-
-  checkCUDNN(cudnnConvolutionForward(cudnnHandle, &alpha, input->tensor_desc,
-				     input->gpu_data, filter->filter_desc, filter->gpu_data,
-				     convDesc, convAlgo, workspace, workspace_size,
-				     &beta, output->tensor_desc, output->gpu_data));
-
-
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - filter->dims.dim_sizes[2]) / vertical_stride + 1;
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - filter->dims.dim_sizes[3]) / horizontal_stride + 1;
-
-
-  int numBlocks = (n * c * h * w  + 127) / 128;
-
-  if (row > 0)
-    postInterpolateRow<<<numBlocks,128>>>(n * c * h * w, n, c, h, w,
-				         (float *) output->gpu_data, row);
-
-  if (col > 0)
-    postInterpolateCol<<<numBlocks,128>>>(n * c * h * w, n, c, h, w,
-				         (float *) output->gpu_data, col);
-
-
-  profileEvent("tensorConv_end", true);
-
-  return output;
-}
-
-
-
-
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void sampleFilterElems(int N,
-		       int n, int c, int h, int w,
-		       float* data,
-		       int skip_elem, int skip_offset, float mul_factor){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    //int local_index = row * w + col;
-    int local_index = (ch * (h * w)) + (row * w) + col;
-    
-    if(local_index % skip_elem  == skip_offset)
-       data[n * (c * h * w) + ch * (h * w) + row * (w) + col] = 0;
-    else
-      data[n * (c * h * w) + ch * (h * w) + row * (w) + col] *= mul_factor;
-      
-  }
-}
-
-
-
-
-
-void sampleFilter(Tensor* filter, int skip_rate, int skip_offset){
-
-  int n = filter->dims.dim_sizes[0];
-  int c = filter->dims.dim_sizes[1];
-  int h = filter->dims.dim_sizes[2];
-  int w = filter->dims.dim_sizes[3];
-    
-  int numBlocks = (n * c * h * w  + 127) / 128;
-  int N = n * c * h * w;
-  //float mul_factor = skip_rate / (skip_rate - 1); 
-  float mul_factor = (skip_rate * 1.0) / (skip_rate - 1); 
-
-  printf ("mul_factor = %f \n", mul_factor);
-  
-  sampleFilterElems<<<numBlocks,128>>>(N,
-				       n, c, h, w,
-				       (float *) filter->gpu_data,
-				       skip_rate, skip_offset, mul_factor);
-
-}
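-
-// Example: skip_rate = 4 and skip_offset = 1 zero every filter element whose
-// flattened per-filter index i satisfies i % 4 == 1, and scale the remaining
-// elements by 4/3 to preserve the filter's expected magnitude.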
-
-
-
-// A 'Simulation' of perforated tensor convolution
-void* tensorConvSampSim(void* input_ptr, void* filter_ptr,
-			int vertical_pad, int horizontal_pad,
-			int vertical_stride, int horizontal_stride,
-			int conv_mode, int conv_groups,
-			int skip_rate, int skip_offset){
-  
-
-  INFO("*** TensorConvolution \n");
-  profileEvent("tensorConv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;  
-  cudnnConvolutionMode_t mode;
-  
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  convertToFP32(input);
-  convertToFP32(filter);
-
-  
-  // Zeroing (+Scaling) Filter elements to 'Simulate' input sampling
-  sampleFilter(filter, skip_rate, skip_offset);
-  
-
-  INFO("vertical_stride = %lu, horizontal_stride = %lu \n", vertical_stride, horizontal_stride);
-
-  checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-  //FIXME: Current hack to preserve backward compatibility
-  if(conv_groups == 0){
-    conv_groups = 1;
-  }
-
-  // NOTE: Adding support for grouped convolution
-  checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  int new_v = vertical_stride;
-  int new_h = horizontal_stride;
-  cudnnDataType_t computeType = CUDNN_DATA_FLOAT;
-  
-  checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					     vertical_pad, horizontal_pad, // conv padding
-					     new_v, new_h, // conv strides
-					     1, 1, // upscaling values
-					     mode , // mode is configurable
-					     computeType)); // defines compute precision
-
-  int n, c, h, w; // output dimensions
-  // Find dimension of convolution output
-  checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   &n, &c, &h, &w));
-
-
-  DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-  Tensor* output;
-  output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, 
-				      CUDNN_TENSOR_NCHW, n, c, h, w);
-  
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, C = %d, H = %d, W = %d \n",
-	output->data_type, output->data_format, output->dims.dim_sizes[0],
-	output->dims.dim_sizes[1],
-	output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-  if(convDesc == NULL || input->tensor_desc == NULL ||
-     filter->filter_desc == NULL || output->tensor_desc == NULL)
-    ERROR("NULL descriptor! \n");
-
-
-  // NOTE-FIXIT: function failing for NHWC formats - perhaps some CUDNN support is lacking
-  checkCUDNN(cudnnGetConvolutionForwardAlgorithm(cudnnHandle,
-						 input->tensor_desc,
-						 filter->filter_desc,
-						 convDesc,
-						 output->tensor_desc,
-						 CUDNN_CONVOLUTION_FWD_PREFER_FASTEST,
-						 //CUDNN_CONVOLUTION_FWD_NO_WORKSPACE,
-						 0,
-						 &convAlgo));
-
-
-  DEBUG("ConvAlgo = %d, FFT = %d, GEMM = %d, WINOGRAD = %d \n", convAlgo,
-	CUDNN_CONVOLUTION_FWD_ALGO_FFT, CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
-	CUDNN_CONVOLUTION_FWD_ALGO_WINOGRAD);
-
-
-  // NOTE: Using GEMM-based Algo
-  convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-
-  size_t workspace_size;
-  checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     convDesc,
-						     output->tensor_desc,
-						     convAlgo,
-						     &workspace_size));
-
-  // Allocating memory for the convolution workspace
-  void* workspace;
-  checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-  DEBUG("workspace size = %d \n", workspace_size);
-
-
-  checkCUDNN(cudnnConvolutionForward(cudnnHandle, &alpha, input->tensor_desc,
-				     input->gpu_data, filter->filter_desc, filter->gpu_data,
-				     convDesc, convAlgo, workspace, workspace_size,
-				     &beta, output->tensor_desc, output->gpu_data));
-
-
- 
-
-  profileEvent("tensorConv_end", true);
-
-  return output;
-}
-
-
-
-
-
-
-
-
-
-
-
-/************ NOTE: API for ApproxHPVM Wrapper runtime *******/ 
-
-
-void* PROMISE_Conv(void* input, float i_min, float i_max,
-		   void* filter, float w_min, float w_max,
-		   void* bias, float b_min, float b_max,
-		   int conv_pad_h, int conv_pad_w,
-		   int conv_stride_h, int conv_stride_w,
-		   int pool_id, int pool_size,
-		   int activation_id, // Relu, Tanh, ClipRelu
-		   float out_min, float out_max, int swing){ 
-
-
-  Tensor* input_t = (Tensor*) input;
-  Tensor* filter_t = (Tensor*) filter;
-  Tensor* bias_t = (Tensor*) bias;
-  
-  int orig_type = input_t->cur_type;
-
-  DEBUG("FP32 conversions \n");
-  
-  convertToFP32(input_t);
-
-  convertToFP32(filter_t);
-  convertToFP32(bias_t);
-
-  DEBUG("DONE FP32 conversions \n");
-  
-
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    filter = quantizeTensorPromise(filter, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-    // aRead error
-    
-    input = addPromiseError(input, swing);
-  }
-
-  
-  void* conv_out;
-  conv_out = tensorConvolution(input, filter,
-			       conv_pad_h, conv_pad_w,
-			       conv_stride_h, conv_stride_w,
-			       1, 0);
-  
-  void* conv_add;
-  if(bias != NULL){
-    conv_add = tensorAdd(conv_out, bias);
-  }
-  else{
-    conv_add = conv_out;
-  }
-
-  void* pool_out;
-  // NOTE: Skip pooling on negative pool sizes
-  if(pool_size > 0){
-    //FIXME: Currently only using MaxPooling
-    pool_out = tensorPooling(conv_add, 0, pool_size, pool_size, 0, 0, pool_size, pool_size);
-  }
-  else{
-    pool_out = conv_add;
-  }
-  
-  void* activation_out;  
-  switch(activation_id){
-  case -1:
-    activation_out = pool_out;
-    INFO("NO Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(pool_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(pool_out);
-    break;
-  case 2:
-    activation_out = tensorRelu2(pool_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-
-
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-
-
-
-  //NOTE: Convert back to FP16 if original type
-  if (orig_type == half_type){
-    convertToFP16((Tensor*) activation_out);
-  }
-
-  
-  return activation_out;
-}
-
-
-
-void* PROMISE_FC(void* input, float i_min, float i_max,
-		 void* weights, float w_min, float w_max,
-		 void* bias, float b_min, float b_max,
-		 int activation_id,
-		 float out_min, float out_max, int swing){
-
-
-  Tensor* input_t = (Tensor*) input;
-  Tensor* weights_t = (Tensor*) weights;
-  Tensor* bias_t = (Tensor*) bias;
-  
-  int orig_type = input_t->cur_type;
-  
-  convertToFP32(input_t);
-  convertToFP32(weights_t);
-  convertToFP32(bias_t);
-  
-  
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    weights = quantizeTensorPromise(weights, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-
-    // NOTE: Modelling aRead error in PROMISE
-    input = addPromiseError(input, swing);
-  }
-
-
-  
-  void* gemm_out;
-  gemm_out = tensorGemmGPU(input, weights);
-
-  
-  void* gemmbias_out;
-  if(bias != NULL){
-    gemmbias_out = tensorAdd(gemm_out, bias);
-  }
-  else{
-    gemmbias_out = gemm_out;
-  }
- 
-  void* activation_out;
-  switch(activation_id){
-
-  case -1:
-    activation_out = gemmbias_out;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(gemmbias_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(gemmbias_out);
-    break;
-  case 2:
-    activation_out = tensorRelu2(gemmbias_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-  
-  
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-
-
-  //NOTE: Convert back to FP16 if original type
-  if (orig_type == half_type){
-    convertToFP16((Tensor*) activation_out);
-  }
-
-
-  
-  return activation_out;
-}
-
-
-
-
-
-// NOTE: Enable the macro below to test against the old PROMISE wrapper
-//#define OLD_MODEL
-
-#ifndef OLD_MODEL
-
-
-
-bool isPromise(int swing){
-  return swing < 8;
-}
-
-
-bool isFullPrecision(int swing){
-  return swing == 11;
-}
-
-
-bool isHalfPrecision(int swing){
-  return swing == 12;
-}
-
-
-bool isPerforation(int swing){
-  return swing >= 21 && swing <= 30;
-}
-
-
-bool isSampling(int swing){
-  return swing >= 31 && swing <= 39;
-}
-
-
-int getSwing(int swing){
-
-  #ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-  
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-
-  #endif  
-
-   DEBUG("---- swing_value = %d \n", swing);  
-
-   return swing;
-}
-
-
-
-
-
-
-class PerfParams{
-
- public:
-  int row;
-  int col;
-  int skip_offset;
-
-  PerfParams(){
-    row = 1;
-    col = 1;
-    skip_offset = 0;
-  }
-  
-  PerfParams(int row1, int col1, int skip_offset1){
-    row = row1;
-    col = col1;
-    skip_offset = skip_offset1;
-  }
- 		
-};
-
-
-
-PerfParams getPerfParams(int swing){
-
-  std::map<int, PerfParams> perf_knob_map;
-
-  PerfParams params21(1, 2, 0);
-  perf_knob_map[21] = params21;
-
-  PerfParams params22(1, 2, 1);
-  perf_knob_map[22] = params22;
-
-  PerfParams params23(1, 3, 0);
-  perf_knob_map[23] = params23;
-
-  PerfParams params24(1, 3, 1);
-  perf_knob_map[24] = params24;
-
-  PerfParams params25(1, 3, 2);
-  perf_knob_map[25] = params25;
-
-  PerfParams params26(2, 1, 0);
-  perf_knob_map[26] = params26;
-
-  PerfParams params27(2, 1, 1);
-  perf_knob_map[27] = params27;
-
-  PerfParams params28(3, 1, 0);
-  perf_knob_map[28] = params28;
-
-  PerfParams params29(3, 1, 1);
-  perf_knob_map[29] = params29;
-
-  PerfParams params30(3, 1, 2);
-  perf_knob_map[30] = params30;
-
-  
-  return perf_knob_map[swing];
-  
-}
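-
-// Summary: knobs 21-25 perforate along columns (col = 2 or 3), knobs 26-30
-// along rows (row = 2 or 3); the last constructor argument is the offset at
-// which skipping starts.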
-
-
-
-
-class SampParams{
-
- public:  
-  int skip_rate;
-  int skip_offset;
-
-  SampParams(){
-    skip_rate = 1;
-    skip_offset = 0;
-  }
-  
-  SampParams(int skip_rate1, int skip_offset1){
-    skip_rate = skip_rate1;
-    skip_offset = skip_offset1;
-  }
- 		
-};
-
-
-
-SampParams getSampParams(int swing){
-
-  std::map<int, SampParams> samp_knob_map;
-
-  SampParams params31(2, 0);
-  samp_knob_map[31] = params31;
-
-  SampParams params32(2, 1);
-  samp_knob_map[32] = params32;
-
-  SampParams params33(4, 0);
-  samp_knob_map[33] = params33;
-
-  SampParams params34(4, 1);
-  samp_knob_map[34] = params34;
-
-  SampParams params35(4, 2);
-  samp_knob_map[35] = params35;
-
-  SampParams params36(4, 3);
-  samp_knob_map[36] = params36;
-
-  return samp_knob_map[swing];
-  
-}
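-
-// Summary: knobs 31-32 sample with skip_rate = 2 (offsets 0-1) and knobs
-// 33-36 with skip_rate = 4 (offsets 0-3).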
-
-
-
-
-
-
-/***** API for Autotuner Use - Not the ApproxHPVM Wrapper API */
-
-
-// NOTE: code to compute the gold result - for norm computations
-//bool compute_norms = false; // true; //false;
-
-void* ConvLayer_PROMISE(void* input, float i_min, float i_max,
-			void* filter, float w_min, float w_max,
-			void* bias, float b_min, float b_max,
-			int conv_pad_h, int conv_pad_w,
-			int conv_stride_h, int conv_stride_w,
-			int pool_id, int pool_size,
-			int activation_id, // Relu, Tanh, ClipRelu
-			float out_min, float out_max, int swing){ 
-
-  if(ONLINE_PROFILING){
-    ERROR("Online Profiling cannot be enabled with PROMISE Simulation \n");
-  }
-
-
-
-  swing = getSwing(swing);  
- 
-  if(isPromise(swing)){
-    
-    return PROMISE_Conv(input, i_min, i_max,
-			filter, w_min, w_max,
-			bias, b_min, b_max,
-			conv_pad_h, conv_pad_w,
-			conv_stride_h, conv_stride_w,
-			pool_id, pool_size,
-			activation_id, 
-			out_min, out_max, swing);
-  }
-
-  
-  
-
-  
-  void* conv_out = NULL;
-  if(isPerforation(swing)){
- 
-    PerfParams params = getPerfParams(swing);
-    DEBUG("params.row = %d, params.col = %d, params.skip_offset = %d \n",
-	  params.row, params.col, params.skip_offset);
-    
-    conv_out = tensorConvPerfCudaHalf(input, filter,
-				      conv_pad_h, conv_pad_w,
-				      conv_stride_h, conv_stride_w, 1, 1,
-				      params.row, params.col, params.skip_offset);
-
-  }
-
-  if(isSampling(swing)){
- 
-    SampParams params = getSampParams(swing);
-    DEBUG("params.skip_rate = %d, params.skip_offset = %d \n",
-	  params.skip_rate, params.skip_offset);
-
-    /*
-    conv_out = tensorConvolutionKernelSamp(input, filter,
-					   conv_pad_h, conv_pad_w,
-					   conv_stride_h, conv_stride_w,
-					   1, 1,
-					   2);
-    */
-
-    
-    conv_out = tensorConvSampSim(input, filter,
-				 conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w, 1, 1,
-				 params.skip_rate, params.skip_offset);
-
-    
-    /* conv_out = tensorConvApproxHalf(input, filter,
-				 conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w, 1, 1,
-				 1,1,
-				 4, 3);
-    // params.skip_rate, params.skip_offset);
-    */
-    
-  }
-  
-
-  if (isHalfPrecision(swing)){
-
-    conv_out = tensorHalfConvolution(input, filter,
-				     conv_pad_h, conv_pad_w,
-				     conv_stride_h, conv_stride_w,
-				     1, 0);
-  }
-
-  if (isFullPrecision(swing)){
-    conv_out = tensorConvolution(input, filter,
-				 conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w,
-				 1, 0);
-  }
-
- 
-  
-
-  
-  void* conv_add;
-  if(bias != NULL){
-    if( !isFullPrecision(swing) ){  
-      conv_add = tensorHalfAdd(conv_out, bias);
-    }
-    else{
-      conv_add = tensorAdd(conv_out, bias);
-    }
-  }
-  else{
-    conv_add = conv_out;
-  }
-
-  void* pool_out;
-  if(pool_size > 0){
-    //FIXME: Currently only using MaxPooling
-    pool_out = tensorHalfPooling(conv_add, 0, pool_size, pool_size,
-				 0, 0, pool_size, pool_size);
-  }
-  else{
-    pool_out = conv_add;
-  }
-  
-  void* activation_out;  
-  switch(activation_id){
-  case -1:
-    activation_out = pool_out;
-    INFO("NO Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorHalfTanh(pool_out);
-    break;
-  case 1:
-    activation_out = tensorHalfRelu(pool_out);
-    break;
-  case 2:
-    activation_out = tensorHalfRelu2(pool_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-
-  
-  return activation_out;
-}
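-
-// Hypothetical call (tensor handles and value ranges are placeholders): a
-// convolution with pad 1, stride 1, a 2x2 max-pool, ReLU, at half precision
-// (swing 12):
-//   void* out = ConvLayer_PROMISE(in, -2, 2, w, -1, 1, b, -1, 1,
-//                                 1, 1, 1, 1, 0, 2, 1, 0, 0, 12);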
-
-
-void* FCLayer_PROMISE(void* input, float i_min, float i_max,
-		      void* weights, float w_min, float w_max,
-		      void* bias, float b_min, float b_max,
-		      int activation_id,
-		      float out_min, float out_max, int swing){ //NOTE: min_val, max_val apply to 'ClippedRelu'
-
-
-  swing = getSwing(swing);
-  
-  if(isPromise(swing)){
-
-    return PROMISE_FC(input, i_min, i_max,
-		      weights, w_min, w_max,
-		      bias, b_min, b_max,
-		      activation_id,
-		      out_min, out_max, swing);
-  }
-
-
-  
-  void* gemm_out;
-  if(!isFullPrecision(swing)){
-    gemm_out = tensorHalfGemm(input, weights);
-  }
-  else{
-    gemm_out = tensorGemmGPU(input, weights);
-  }
-
-  
-  void* gemmbias_out;
-  if(bias != NULL){
-    // Use the FP16 add path unless running at full precision
-    if(!isFullPrecision(swing)){
-      gemmbias_out = tensorHalfAdd(gemm_out, bias);
-    }
-    else{
-      gemmbias_out = tensorAdd(gemm_out, bias);
-    }
-  }
-  else{
-    gemmbias_out = gemm_out;
-  }
- 
-  void* activation_out;
-  switch(activation_id){
-
-  case -1:
-    activation_out = gemmbias_out;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorHalfTanh(gemmbias_out);
-    break;
-  case 1:
-    activation_out = tensorHalfRelu(gemmbias_out);
-    break;
-  case 2:
-    activation_out = tensorHalfRelu2(gemmbias_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-  
-
-  
-  
-  return activation_out;
-}
-
-#endif
-
-
-
-#ifdef OLD_MODEL
-
-#endif
-
-#endif 
-
-
-
-/************* NOTE: Outdated PROMISE routines - Used for Comparison ****/
-
-  
-
-
-/*
-
-
-
-void* ConvLayer_PROMISE(void* input, float i_min, float i_max,
-			void* filter, float w_min, float w_max,
-			void* bias, float b_min, float b_max,
-			int conv_pad_h, int conv_pad_w, int conv_stride_h, int conv_stride_w,
-			int pool_id, int pool_size,
-			int activation_id, // Relu, Tanh, ClipRelu
-			float out_min, float out_max, int swing){ 
-
-
-  DEBUG("\n\n**** NOTE: Conv OLD MODEL *** \n\n");
-  
-  #ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-  
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-  
-  #endif  
-
-  
-  if (swing < 0 || swing > 20){
-    ERROR("Incorrect swing value");
-  }
-
-  
-
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    filter = quantizeTensorPromise(filter, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-    // aRead error
-    
-    input = addPromiseError(input, swing);
-  }
-
-  
-  void* conv_out;
-  if(swing == 8 || (swing >= 12 && swing <= 15) ){
-    //conv_out = tensorConvPerf(input, filter, conv_pad_h, conv_pad_w,
-    //		              conv_stride_h, conv_stride_w, 1, 1, 1, 0);
-
-    int rows = 2;
-    switch(swing){
-
-    case 12: rows = 5; break;
-    case 13: rows = 4; break;
-    case 14: rows = 3; break;
-    case 15: rows = 2; break;    
-		   
-    default: rows = 2; break;
-    }
-    
-    conv_out = tensorConvPerfSim(input, filter, conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w, 1, 1, rows, 0);
-
-    /*void* gold = tensorConvolution(input, filter,
-				   conv_pad_h, conv_pad_w,
-				   conv_stride_h, conv_stride_w,
-				   1, 0);
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*) conv_out, (struct Tensor*) gold);
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-
-/*-------------
-  }
-  else if(swing == 9 || (swing >= 16 && swing <= 19) ){
-    //conv_out = tensorConvPerf(input, filter, conv_pad_h, conv_pad_w,
-    //		              conv_stride_h, conv_stride_w, 1, 1, 0, 1);
-
-
-    int cols = 2;
-    switch(swing){
-
-    case 16: cols = 5; break;
-    case 17: cols = 4; break;
-    case 18: cols = 3; break;
-    case 19: cols = 2; break;    
-		   
-    default: cols = 2; break;
-    }
-
-    
-    conv_out = tensorConvPerfSim(input, filter, conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w, 1, 1, 0, cols);
-
-
-    /*void* gold = tensorConvolution(input, filter,
-				   conv_pad_h, conv_pad_w,
-				   conv_stride_h, conv_stride_w,
-				   1, 0);
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*)conv_out, (struct Tensor*) gold);
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-
-/*-----
-
-  }
-  else if(swing == 10){  
-    conv_out = tensorHalfConvolution(input, filter,
-				     conv_pad_h, conv_pad_w,
-				     conv_stride_h, conv_stride_w,
-				     1, 0);
-  }
-  else{
-    conv_out = tensorConvolution(input, filter,
-				 conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w,
-				 1, 0);
-  }
-  
-  void* conv_add;
-  if(bias != NULL){
-    if(swing >= 8){  
-      conv_add = tensorHalfAdd(conv_out, bias);
-    }
-    else{
-      conv_add = tensorAdd(conv_out, bias);
-    }
-  }
-  else{
-    conv_add = conv_out;
-  }
-
-  void* pool_out;
-  // NOTE: Skip pooling on negative pool sizes
-  if(pool_size > 0){
-    //FIXME: Currently only using MaxPooling
-    pool_out = tensorPooling(conv_add, 0, pool_size, pool_size, 0, 0, pool_size, pool_size);
-  }
-  else{
-    pool_out = conv_add;
-  }
-  
-  void* activation_out;  
-  switch(activation_id){
-  case -1:
-    activation_out = pool_out;
-    INFO("NO Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(pool_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(pool_out);
-    break;
-  case 2:
-    activation_out = tensorHalfRelu2(pool_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-
-
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-  
-  return activation_out;
-}
-
-
-void* FCLayer_PROMISE(void* input, float i_min, float i_max,
-		      void* weights, float w_min, float w_max,
-		      void* bias, float b_min, float b_max,
-		      int activation_id,
-		      float out_min, float out_max, int swing){ 
-
-
-  
-  #ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-  
-  #endif
- 
-  
-  if (swing < 0 || swing > 20){
-    ERROR("Incorrect swing value");
-  }
-  
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    weights = quantizeTensorPromise(weights, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-
-    // NOTE: Modelling aRead error in PROMISE
-    input = addPromiseError(input, swing);
-  }
-
-
-  
-  void* gemm_out;
-  if(swing >= 8 && swing < 11){
-    gemm_out = tensorHalfGemm(input, weights);
-  }
-  else{
-    gemm_out = tensorGemmGPU(input, weights);
-  }
-
-  
-  void* gemmbias_out;
-  if(bias != NULL){
-    // Swing 8 corresponds to FP32
-    if(swing >= 8 && swing < 20){
-      gemmbias_out = tensorHalfAdd(gemm_out, bias);
-    }
-    else{
-      gemmbias_out = tensorAdd(gemm_out, bias);
-    }
-  }
-  else{
-    gemmbias_out = gemm_out;
-  }
- 
-  void* activation_out;
-  switch(activation_id){
-
-  case -1:
-    activation_out = gemmbias_out;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(gemmbias_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(gemmbias_out);
-    break;
-  case 2:
-    activation_out = tensorRelu2(gemmbias_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_out);
-    break;
-  }
-  
-  
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-  
-  return activation_out;
-}
-
-#endif
-
-
-
-
-
-
-#endif
-
-
-
-
-
-
-
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-  
-
-    /*void* gold = tensorConvolution(input, filter,
-				   conv_pad_h, conv_pad_w,
-				   conv_stride_h, conv_stride_w,
-				   1, 0);
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*)conv_out, (struct Tensor*) gold);
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-
-
-
-
-
-
-  /*#ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-  
-  #endif
-
-  */
-
- 
-/*  void* gold;
-  if (compute_norms){
-
-    gold = tensorConvolution(input, filter,
-			     conv_pad_h, conv_pad_w,
-			     conv_stride_h, conv_stride_w,
-			     1, 0);
-  }
-
-
-
-  
-  if (compute_norms){
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*) conv_out,
-						(struct Tensor*) gold);
-    add_norms(norms, "tensorConv", swing);
-    add_conv_overheads(input, filter, conv_stride_h, conv_stride_w, swing);
-  }
-
-
-
-  if (compute_norms){
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*) activation_out,
-						(struct Tensor*) activation_out);
-    add_norms(norms, "tensorMul", swing);
-    add_gemm_overheads(input, weights, swing);
-  }
-
-
-
-*/
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques.h
deleted file mode 100644
index 70f89a4a97106cee389a6b220054400306dac9f1..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques.h
+++ /dev/null
@@ -1,2101 +0,0 @@
-
-
-
-#include "tensor_utils.cu"
-
-
-
-__global__ void depthwise_conv(float* const __restrict__ y,
-			       const float* const __restrict__ x,
-			       const float* const __restrict__ w,
-			       const int B, const int M,
-			       const int H, const int W, const int KH,
-			       const int KW, const int H_out, const int W_out,
-			       const int H_pad, const int W_pad,
-			       const int H_stride, const int W_stride, const int start_batch)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 1;
-
-  const int b = num * blockIdx.x + start_batch;
-  const int m = blockIdx.y; //current filter/channel
-
-  const int tx = threadIdx.x;
-
-  const int start_h = (threadIdx.x / W_out) * H_stride - H_pad;
-  const int start_w = (threadIdx.x % W_out) * W_stride - W_pad;
-
-  float C[num] = { 0 };
-
-  const float* weights = &w[m * KH * KW];
-
-  for (int k = 0; k < KH * KW; k++) {
-    int p = k / KW;
-    int q = k % KW;
-
-    #pragma unroll
-    for (int i = 0; i < num; i++) {
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	C[i] += x4d(b + i, m, start_h + p, start_w + q) * weights[k];
-      }
-
-    }
-  }
-
-  #pragma unroll
-  for (int i = 0; i < num; i++) {
-    if(b + i < B)
-      y4d(b + i, m, 0, tx) = C[i];
-
-  }
-	
-
-  #undef y4d 
-  #undef x4d
-}
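-
-// Thread mapping: blockIdx.y selects the channel m, threadIdx.x enumerates
-// the H_out * W_out output positions, and each block along x handles one
-// image of the batch.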
-
-
-__global__ void depthwise_convNew(float* const __restrict__ y,
-				  const float* const __restrict__ x,
-				  const float* const __restrict__ w,
-				  const int B, const int M,
-				  const int H, const int W, const int KH,
-				  const int KW, const int H_out, const int W_out,
-				  const int H_pad, const int W_pad,
-				  const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 12;
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out); 
-
-  const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-  const int start_h = (tx / W_out) * H_stride - H_pad;
-  const int start_w = (tx % W_out) * W_stride - W_pad;
-
-  float C[num] = { 0 };
-
-  const float* weights = &w[m * KH * KW];
-
-  for (int k = 0; k < KH * KW; k++) {
-    int p = k / KW;
-    int q = k % KW;
-
-    if (start_h + p > -1 && start_h + p < H &&
-	start_w + q > -1 && start_w + q < W) {
-
-      #pragma unroll
-      for (int i = 0; i < num; i++) {
-	if(b + i < B)
-	  C[i] += x4d(b + i, m, start_h + p, start_w + q) * weights[k];
-      }
-
-    }
-  }
-
-  #pragma unroll
-  for (int i = 0; i < num; i++) {
-    if(b + i < B)
-      y4d(b + i, m, 0, tx) = C[i];
-
-  }
-	
-
-  #undef y4d 
-  #undef x4d
-}
-
-__global__ void depthwise_convNew8(float* const __restrict__ y,
-				   const float* const __restrict__ x,
-				   const float* const __restrict__ w,
-				   const int B, const int M,
-				   const int H, const int W, const int KH,
-				   const int KW, const int H_out, const int W_out,
-				   const int H_pad, const int W_pad,
-				   const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 8;
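-  // 8-wide batch unroll with scalar accumulators c0..c7; the b + i < B
-  // checks handle a partial tail when B is not a multiple of 8.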
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    float c0 = 0;
-    float c1 = 0;
-    float c2 = 0;
-    float c3 = 0;
-    float c4 = 0;
-    float c5 = 0;
-    float c6 = 0;
-    float c7 = 0;
-	
-    const float* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	c0 += x4d(b, m, start_h + p, start_w + q) * weights[k];
-	if(b + 1 < B)
-	  c1 += x4d(b + 1, m, start_h + p, start_w + q) * weights[k];
-	if(b + 2 < B)
-	  c2 += x4d(b + 2, m, start_h + p, start_w + q) * weights[k];
-	if(b + 3 < B)
-	  c3 += x4d(b + 3, m, start_h + p, start_w + q) * weights[k];
-	if(b + 4 < B)
-	  c4 += x4d(b + 4, m, start_h + p, start_w + q) * weights[k];
-	if(b + 5 < B)
-	  c5 += x4d(b + 5, m, start_h + p, start_w + q) * weights[k];
-	if(b + 6 < B)
-	  c6 += x4d(b + 6, m, start_h + p, start_w + q) * weights[k];
-	if(b + 7 < B)
-	  c7 += x4d(b + 7, m, start_h + p, start_w + q) * weights[k];
-    
-
-      }
-    }
-
-    y4d(b, m, 0, tx) = c0;	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = c1;
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = c2;
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = c3;
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = c4;
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = c5;
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = c6;
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = c7;
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-__global__ void depthwise_convNew8_half(__half* const __restrict__ y,
-					const __half* const __restrict__ x,
-					const __half* const __restrict__ w,
-					const int B, const int M,
-					const int H, const int W, const int KH,
-					const int KW, const int H_out, const int W_out,
-					const int H_pad, const int W_pad,
-					const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 8;
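-  // Half-precision version of depthwise_convNew8: __hfma(a, b, c) performs
-  // the fused multiply-add a * b + c entirely in FP16.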
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    __half c0 = 0;
-    __half c1 = 0;
-    __half c2 = 0;
-    __half c3 = 0;
-    __half c4 = 0;
-    __half c5 = 0;
-    __half c6 = 0;
-    __half c7 = 0;
-	
-    const __half* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	c0 = __hfma(x4d(b, m, start_h + p, start_w + q), weights[k], c0);
-	if(b + 1 < B)
-	  c1 = __hfma(x4d(b + 1, m, start_h + p, start_w + q), weights[k], c1);
-	if(b + 2 < B)
-	  c2 = __hfma(x4d(b + 2, m, start_h + p, start_w + q), weights[k], c2);
-	if(b + 3 < B)
-	  c3 = __hfma(x4d(b + 3, m, start_h + p, start_w + q), weights[k], c3);
-	if(b + 4 < B)
-	  c4 = __hfma(x4d(b + 4, m, start_h + p, start_w + q), weights[k], c4);
-	if(b + 5 < B)
-	  c5 = __hfma(x4d(b + 5, m, start_h + p, start_w + q), weights[k], c5);
-	if(b + 6 < B)
-	  c6 = __hfma(x4d(b + 6, m, start_h + p, start_w + q), weights[k], c6);
-	if(b + 7 < B)
-	  c7 = __hfma(x4d(b + 7, m, start_h + p, start_w + q), weights[k], c7);
-    
-
-      }
-    }
-
-    y4d(b, m, 0, tx) = c0;	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = c1;
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = c2;
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = c3;
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = c4;
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = c5;
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = c6;
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = c7;
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-__global__ void depthwise_convNew8_half1(__half* const __restrict__ y,
-					const __half* const __restrict__ x,
-					const __half* const __restrict__ w,
-					const int B, const int M,
-					const int H, const int W, const int KH,
-					const int KW, const int H_out, const int W_out,
-					const int H_pad, const int W_pad,
-					const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 8;
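-  // Variant with one k-loop per batch image: images past the end of the
-  // batch skip their entire loop instead of branching inside a shared one.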
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    __half c0 = 0;
-    __half c1 = 0;
-    __half c2 = 0;
-    __half c3 = 0;
-    __half c4 = 0;
-    __half c5 = 0;
-    __half c6 = 0;
-    __half c7 = 0;
-	
-    const __half* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	c0 = __hfma(x4d(b, m, start_h + p, start_w + q), weights[k], c0);
-      }
-    }
-
-    if(b + 1 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c1 = __hfma(x4d(b + 1, m, start_h + p, start_w + q), weights[k], c1);
-	}
-      }
-    }
-
-    if(b + 2 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c2 = __hfma(x4d(b + 2, m, start_h + p, start_w + q), weights[k], c2);
-	}
-      }
-    }
-
-    if(b + 3 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c3 = __hfma(x4d(b + 3, m, start_h + p, start_w + q), weights[k], c3);
-	}
-      }
-    }
-
-    if(b + 4 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c4 = __hfma(x4d(b + 4, m, start_h + p, start_w + q), weights[k], c4);
-	}
-      }
-    }
-
-    if(b + 5 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c5 = __hfma(x4d(b + 5, m, start_h + p, start_w + q), weights[k], c5);
-	}
-      }
-    }
-
-    if(b + 6 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c6 = __hfma(x4d(b + 6, m, start_h + p, start_w + q), weights[k], c6);
-	}
-      }
-    }
-
-    if(b + 7 < B){
-      for (int k = 0; k < KH * KW; k++) {
-	int p = k / KW;
-	int q = k % KW;
-
-	if (start_h + p > -1 && start_h + p < H &&
-	    start_w + q > -1 && start_w + q < W) {
-
-	  c7 = __hfma(x4d(b + 7, m, start_h + p, start_w + q), weights[k], c7);
-	}
-      }
-    }
-
-    
-
-    y4d(b, m, 0, tx) = c0;	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = c1;
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = c2;
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = c3;
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = c4;
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = c5;
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = c6;
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = c7;
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-
-__global__ void depthwise_convNew12(float* const __restrict__ y,
-				    const float* const __restrict__ x,
-				    const float* const __restrict__ w,
-				    const int B, const int M,
-				    const int H, const int W, const int KH,
-				    const int KW, const int H_out, const int W_out,
-				    const int H_pad, const int W_pad,
-				    const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 12;
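-  // 12-image variant of depthwise_convNew8; identical indexing, wider unroll.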
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    float c0 = 0;
-    float c1 = 0;
-    float c2 = 0;
-    float c3 = 0;
-    float c4 = 0;
-    float c5 = 0;
-    float c6 = 0;
-    float c7 = 0;
-    float c8 = 0;
-    float c9 = 0;
-    float c10 = 0;
-    float c11 = 0;
-	
-    const float* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	c0 += x4d(b, m, start_h + p, start_w + q) * weights[k];
-	if(b + 1 < B)
-	  c1 += x4d(b + 1, m, start_h + p, start_w + q) * weights[k];
-	if(b + 2 < B)
-	  c2 += x4d(b + 2, m, start_h + p, start_w + q) * weights[k];
-	if(b + 3 < B)
-	  c3 += x4d(b + 3, m, start_h + p, start_w + q) * weights[k];
-	if(b + 4 < B)
-	  c4 += x4d(b + 4, m, start_h + p, start_w + q) * weights[k];
-	if(b + 5 < B)
-	  c5 += x4d(b + 5, m, start_h + p, start_w + q) * weights[k];
-	if(b + 6 < B)
-	  c6 += x4d(b + 6, m, start_h + p, start_w + q) * weights[k];
-	if(b + 7 < B)
-	  c7 += x4d(b + 7, m, start_h + p, start_w + q) * weights[k];
-	if(b + 8 < B)
-	  c8 += x4d(b + 8, m, start_h + p, start_w + q) * weights[k];
-	if(b + 9 < B)
-	  c9 += x4d(b + 9, m, start_h + p, start_w + q) * weights[k];
-	if(b + 10 < B)
-	  c10 += x4d(b + 10, m, start_h + p, start_w + q) * weights[k];
-	if(b + 11 < B)
-	  c11 += x4d(b + 11, m, start_h + p, start_w + q) * weights[k];
-    
-
-      }
-    }
-
-    y4d(b, m, 0, tx) = c0;	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = c1;
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = c2;
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = c3;
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = c4;
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = c5;
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = c6;
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = c7;
-    if(b + 8 < B)
-      y4d(b + 8, m, 0, tx) = c8;
-    if(b + 9 < B)
-      y4d(b + 9, m, 0, tx) = c9;
-    if(b + 10 < B)
-      y4d(b + 10, m, 0, tx) = c10;
-    if(b + 11 < B)
-      y4d(b + 11, m, 0, tx) = c11;
-	
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-
-__global__ void depthwise_convNew12_half(__half* const __restrict__ y,
-				    const __half* const __restrict__ x,
-				    const __half* const __restrict__ w,
-				    const int B, const int M,
-				    const int H, const int W, const int KH,
-				    const int KW, const int H_out, const int W_out,
-				    const int H_pad, const int W_pad,
-				    const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 12;
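-  // Half-precision counterpart of depthwise_convNew12, accumulating via __hfma.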
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    __half c0 = 0;
-    __half c1 = 0;
-    __half c2 = 0;
-    __half c3 = 0;
-    __half c4 = 0;
-    __half c5 = 0;
-    __half c6 = 0;
-    __half c7 = 0;
-    __half c8 = 0;
-    __half c9 = 0;
-    __half c10 = 0;
-    __half c11 = 0;
-	
-    const __half* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-	c0 = __hfma(x4d(b, m, start_h + p, start_w + q), weights[k], c0);
-	if(b + 1 < B)
-	  c1 = __hfma(x4d(b + 1, m, start_h + p, start_w + q), weights[k], c1);
-	if(b + 2 < B)
-	  c2 = __hfma(x4d(b + 2, m, start_h + p, start_w + q), weights[k], c2);
-	if(b + 3 < B)
-	  c3 = __hfma(x4d(b + 3, m, start_h + p, start_w + q), weights[k], c3);
-	if(b + 4 < B)
-	  c4 = __hfma(x4d(b + 4, m, start_h + p, start_w + q), weights[k], c4);
-	if(b + 5 < B)
-	  c5 = __hfma(x4d(b + 5, m, start_h + p, start_w + q), weights[k], c5);
-	if(b + 6 < B)
-	  c6 = __hfma(x4d(b + 6, m, start_h + p, start_w + q), weights[k], c6);
-	if(b + 7 < B)
-	  c7 = __hfma(x4d(b + 7, m, start_h + p, start_w + q), weights[k], c7);
-	if(b + 8 < B)
-	  c8 = __hfma(x4d(b + 8, m, start_h + p, start_w + q), weights[k], c8);
-	if(b + 9 < B)
-	  c9 = __hfma(x4d(b + 9, m, start_h + p, start_w + q), weights[k], c9);
-	if(b + 10 < B)
-	  c10 = __hfma(x4d(b + 10, m, start_h + p, start_w + q), weights[k], c10);
-	if(b + 11 < B)
-	  c11 = __hfma(x4d(b + 11, m, start_h + p, start_w + q), weights[k], c11);
-    
-
-      }
-    }
-
-    y4d(b, m, 0, tx) = c0;	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = c1;
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = c2;
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = c3;
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = c4;
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = c5;
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = c6;
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = c7;
-    if(b + 8 < B)
-      y4d(b + 8, m, 0, tx) = c8;
-    if(b + 9 < B)
-      y4d(b + 9, m, 0, tx) = c9;
-    if(b + 10 < B)
-      y4d(b + 10, m, 0, tx) = c10;
-    if(b + 11 < B)
-      y4d(b + 11, m, 0, tx) = c11;
-	
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-
-__global__ void depthwise_convNew8_half2(__half* const __restrict__ y,
-					const __half* const __restrict__ x,
-					const __half* const __restrict__ w,
-					const int B, const int M,
-					const int H, const int W, const int KH,
-					const int KW, const int H_out, const int W_out,
-					const int H_pad, const int W_pad,
-					const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 8;
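-  // Packs adjacent batch images into __half2 vectors so each __hfma2 issues
-  // two fused multiply-adds; results are unpacked with __high2half/__low2half.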
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    __half2 c0 = __half2half2(0);
-    __half2 c1 = __half2half2(0);
-    __half2 c2 = __half2half2(0);
-    __half2 c3 = __half2half2(0);
-    	
-    const __half* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-      
-	// zero-init: the batch-tail branches below may leave t2..t4 unset
-	__half2 t1 = __half2half2(0);
-	__half2 t2 = __half2half2(0);
-	__half2 t3 = __half2half2(0);
-	__half2 t4 = __half2half2(0);
-	if(b + 7 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	  t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	  t3 = __halves2half2(x4d(b + 5, m, start_h + p, start_w + q), x4d(b + 4, m, start_h + p, start_w + q));
-	  t4 = __halves2half2(x4d(b + 7, m, start_h + p, start_w + q), x4d(b + 6, m, start_h + p, start_w + q));
-	}
-	else if(b + 6 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	  t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	  t3 = __halves2half2(x4d(b + 5, m, start_h + p, start_w + q), x4d(b + 4, m, start_h + p, start_w + q));
-	  t4 = __halves2half2(0, x4d(b + 6, m, start_h + p, start_w + q));
-
-	}
-	else if(b + 5 < B){
-	    t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	    t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	    t3 = __halves2half2(x4d(b + 5, m, start_h + p, start_w + q), x4d(b + 4, m, start_h + p, start_w + q));
-	}
-	else if(b + 4 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	  t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	  t3 = __halves2half2(0, x4d(b + 4, m, start_h + p, start_w + q));
-
-	}
-	else if(b + 3 < B){
-	    t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	    t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	 }
-	else if(b + 2 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	  t2 = __halves2half2(0, x4d(b + 2, m, start_h + p, start_w + q));
-
-	}
-	else if(b + 1 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	}
-	else{
-	  t1 = __halves2half2(0, x4d(b, m, start_h + p, start_w + q));
-
-	 }
-
-	
-	c0 = __hfma2(t1, __halves2half2(weights[k], weights[k]), c0);
-	c1 = __hfma2(t2, __halves2half2(weights[k], weights[k]), c1);
-	c2 = __hfma2(t3, __halves2half2(weights[k], weights[k]), c2);
-	c3 = __hfma2(t4, __halves2half2(weights[k], weights[k]), c3);
-
-      }
-    }
-
-    y4d(b, m, 0, tx) = __high2half(c0);	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = __low2half(c0);
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = __high2half(c1);
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = __low2half(c1);
-    if(b + 4 < B)
-      y4d(b + 4, m, 0, tx) = __high2half(c2);
-    if(b + 5 < B)
-      y4d(b + 5, m, 0, tx) = __low2half(c2);
-    if(b + 6 < B)
-      y4d(b + 6, m, 0, tx) = __high2half(c3);
-    if(b + 7 < B)
-      y4d(b + 7, m, 0, tx) = __low2half(c3);
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-
-// Shared-memory variant; assumes stride 1 (no stride parameters are taken)
-__global__ void depthwise_conv4_half3(__half* const __restrict__ y,
-	const __half* const __restrict__ x,
-	const __half* const __restrict__ w,
-	const int B, const int M,
-	const int H, const int W, const int KH,
-	const int KW, const int H_out, const int W_out,
-	const int H_pad, const int W_pad,
-	const int C_dim, const int H_dim, const int W_dim)
-{
-
-#define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-#define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-	const int num = 1;
-
-	const int b = num * blockIdx.x;
-	const int m = (blockIdx.y * blockDim.x + threadIdx.x) / (H_out * W_out);
-
-	if (m < M) {
-		const int tx = (blockIdx.y * blockDim.x + threadIdx.x) % (H_out * W_out);
-
-		const int start_h = (tx / W_out) - H_pad;
-		const int start_w = (tx % W_out) - W_pad;
-
-		const int bstart_h = (blockIdx.y * blockDim.x % (H_out * W_out)) / W_out - H_pad;
-		const int bstart_w = (blockIdx.y * blockDim.x % (H_out * W_out)) % W_out - W_pad; // was -H_pad, a copy-paste bug
-		const int bstartm = (blockIdx.y * blockDim.x / (H_out * W_out));
-
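-		// The block cooperatively stages a C_dim x H_dim x W_dim input tile
-		// into dynamic shared memory before accumulating.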
-		extern __shared__ __half xdata[];
-
-		for (int i = 0; i < C_dim * H_dim * W_dim; i += blockDim.x) {
-			if (i / (H_dim * W_dim) + bstartm < M && (i % (H_dim * W_dim)) / W_dim + bstart_h > -1 &&
-				(i % (H_dim * W_dim)) / W_dim + bstart_h < H && (i % (H_dim * W_dim)) % W_dim + bstart_w > -1 &&
-				(i % (H_dim * W_dim)) % W_dim + bstart_w < W) {
-				xdata[i] = x4d(b, i / (H_dim * W_dim) + bstartm, (i % (H_dim * W_dim)) / W_dim + bstart_h,
-					(i % (H_dim * W_dim)) % W_dim + bstart_w);
-			}
-		}
-		__syncthreads();
-
-		__half c0 = 0; // accumulator must be zero-initialized
-		const __half* weights = &w[m * KH * KW];
-
-		for (int k = 0; k < KH * KW; k++) {
-			int p = k / KW;
-			int q = k % KW;
-			if (start_h + p > -1 && start_h + p < H &&
-				start_w + q > -1 && start_w + q < W) {
-
-
-				__half t1;
-
-				//int total = C_dim * H_dim * W_dim;
-				t1 = xdata[(m - bstartm) * H_dim * W_dim + (start_h + p - bstart_h) * W_dim +
-					start_w + q - bstart_w];
-
-
-				c0 = __hfma(t1, weights[k], c0);
-			}
-		}
-
-		y4d(b, m, 0, tx) = c0;
-
-
-	}
-
-#undef y4d 
-#undef x4d
-}
-
-
-__global__ void depthwise_convNew4_half2(__half* const __restrict__ y,
-					const __half* const __restrict__ x,
-					const __half* const __restrict__ w,
-					const int B, const int M,
-					const int H, const int W, const int KH,
-					const int KW, const int H_out, const int W_out,
-					const int H_pad, const int W_pad,
-					const int H_stride, const int W_stride)
-{
-
-  #define y4d(i3, i2, i1, i0) y[(i3) * (M * H_out * W_out) + (i2) * (H_out * W_out) + (i1) * (W_out) + i0]
-  #define x4d(i3, i2, i1, i0) x[(i3) * (M * H * W) + (i2) * (H * W) + (i1) * (W) + i0]
-
-  const int num = 4;
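-  // 4-image __half2 variant: two packed accumulators instead of four.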
-
-  const int b = num * blockIdx.x;
-  const int m = (blockIdx.y * blockDim.x  + threadIdx.x)/ (H_out * W_out);
-	
-  if(m < M){
-    const int tx = (blockIdx.y * blockDim.x  + threadIdx.x) % (H_out * W_out);
-
-    const int start_h = (tx / W_out) * H_stride - H_pad;
-    const int start_w = (tx % W_out) * W_stride - W_pad;
-
-    __half2 c0 = __half2half2(0);
-    __half2 c1 = __half2half2(0);
- 
-    const __half* weights = &w[m * KH * KW];
-
-    for (int k = 0; k < KH * KW; k++) {
-      int p = k / KW;
-      int q = k % KW;
-      if (start_h + p > -1 && start_h + p < H &&
-	  start_w + q > -1 && start_w + q < W) {
-
-      
-	// zero-init: t2 is left unset by the branches below when b + 2 >= B
-	__half2 t1 = __half2half2(0);
-	__half2 t2 = __half2half2(0);
-	if(b + 3 < B){
-	    t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	    t2 = __halves2half2(x4d(b + 3, m, start_h + p, start_w + q), x4d(b + 2, m, start_h + p, start_w + q));
-	 }
-	else if(b + 2 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	  t2 = __halves2half2(0, x4d(b + 2, m, start_h + p, start_w + q));
-
-	}
-	else if(b + 1 < B){
-	  t1 = __halves2half2(x4d(b + 1, m, start_h + p, start_w + q), x4d(b, m, start_h + p, start_w + q));
-	}
-	else{
-	  t1 = __halves2half2(0, x4d(b, m, start_h + p, start_w + q));
-
-	 }
-
-	
-	c0 = __hfma2(t1, __halves2half2(weights[k], weights[k]), c0);
-	c1 = __hfma2(t2, __halves2half2(weights[k], weights[k]), c1);
-	
-      }
-    }
-
-    y4d(b, m, 0, tx) = __high2half(c0);	
-    if(b + 1 < B)
-      y4d(b + 1, m, 0, tx) = __low2half(c0);
-    if(b + 2 < B)
-      y4d(b + 2, m, 0, tx) = __high2half(c1);
-    if(b + 3 < B)
-      y4d(b + 3, m, 0, tx) = __low2half(c1);
-
-  }
-	
-  #undef y4d 
-  #undef x4d
-}
-
-
-void* tensorConvCutlass(void* input_ptr, void* filter_ptr,
-			int vertical_pad, int horizontal_pad,
-			int vertical_stride, int horizontal_stride,
-			int conv_mode, int conv_groups){
-
-
-  INFO("*** TensorConvolution \n");
-  profileEvent("Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  Tensor* output;
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  convertToFP32(input);
-  convertToFP32(filter);
-
-  
-  if (conv_groups > 32) {
-    // TODO: Support other cases;  
-    hostToDeviceCopy(input);
-    hostToDeviceCopy(filter);
-
-    int n, c, h, w; // output dimensions  
-    n = input->dims.dim_sizes[0];
-    c = input->dims.dim_sizes[1];
-    const int KH = filter->dims.dim_sizes[2];
-    const int KW = filter->dims.dim_sizes[3];
-    h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-    w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-
-    output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, //input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-
-		
-    int blockSize;
-    blockSize = 64;
-		
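-    // grid.x tiles the batch in groups of 8 (matching depthwise_convNew8);
-    // grid.y spreads the c * h * w output elements across blocks of blockSize.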
-    dim3 grid(((n + 7)/ 8), (c * h * w + blockSize - 1)/ blockSize);
-    dim3 block(blockSize);
-    depthwise_convNew8<<<grid, block>>> ((float*)output->gpu_data,
-					 (float*)input->gpu_data, (float*)filter->gpu_data,
-					 input->dims.dim_sizes[0], input->dims.dim_sizes[1],
-					 input->dims.dim_sizes[2], input->dims.dim_sizes[3],
-					 KH, KW, h, w, vertical_pad, horizontal_pad,
-					 vertical_stride, horizontal_stride);
-
-  }
-  else {
-
-    cudnnConvolutionDescriptor_t convDesc;
-    cudnnConvolutionFwdAlgo_t convAlgo;
-    cudnnConvolutionMode_t mode;
-    if (conv_mode == 0)
-      mode = CUDNN_CONVOLUTION;
-    else if (conv_mode == 1)
-      mode = CUDNN_CROSS_CORRELATION;
-
-    // FIXIT: Need to be more aware of the implications of alpha and beta
-    float alpha = 1.0f, beta = 0.0f;
-
-    // TODO: Support other cases;  
-    hostToDeviceCopy(input);
-    hostToDeviceCopy(filter);
-
-    INFO("vertical_stride = %d, horizontal_stride = %d \n", vertical_stride, horizontal_stride);
-
-    checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-    // NOTE: Adding support for grouped convolution
-    checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-
-    cudnnDataType_t computeType = CUDNN_DATA_FLOAT;
-    // FIXIT: Think if upscaling values need to be configurable?
-    // IMP-FIXIT: Either make mode configurable OR see if CUDNN_CONVOLUTION MODE should be used?
-    checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					       vertical_pad, horizontal_pad, // conv padding
-					       vertical_stride, horizontal_stride, // conv strides
-					       1, 1, // upscaling values
-					       mode, // mode is configurable
-					       computeType)); // defines compute precision
-
-    int n, c, h, w; // output dimensions  
-    // Find dimension of convolution output
-    checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     &n, &c, &h, &w));
-
-
-    DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-    if (input->data_format == CUDNN_TENSOR_NCHW)
-      output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-				       CUDNN_TENSOR_NCHW, n, c, h, w);
-    else if (input->data_format == CUDNN_TENSOR_NHWC) {
-      DEBUG("* NHWC Format \n");
-      output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, //input->data_type,
-				       CUDNN_TENSOR_NHWC, n, h, w, c);
-    }
-    else
-      ERROR("Unsupported Tensor Type");
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-    DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, C = %d, H = %d, W = %d \n",
-	  output->data_type, output->data_format, output->dims.dim_sizes[0], output->dims.dim_sizes[1],
-	  output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-    if (convDesc == NULL || input->tensor_desc == NULL ||
-	filter->filter_desc == NULL || output->tensor_desc == NULL)
-      ERROR("NULL descriptor! \n");
-
-
-    // NOTE-FIXIT: function failing for NHWC formats - perhaps some CUDNN support is lacking
-    checkCUDNN(cudnnGetConvolutionForwardAlgorithm(cudnnHandle,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   convDesc,
-						   output->tensor_desc,
-						   CUDNN_CONVOLUTION_FWD_PREFER_FASTEST,
-						   //CUDNN_CONVOLUTION_FWD_NO_WORKSPACE,
-						   0,
-						   &convAlgo));
-
-
-    DEBUG("ConvAlgo = %d, FFT = %d, GEMM = %d, WINOGRAD = %d \n", convAlgo,
-	  CUDNN_CONVOLUTION_FWD_ALGO_FFT, CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
-	  CUDNN_CONVOLUTION_FWD_ALGO_WINOGRAD);
-
-
-    // FIXIT: Algo shouldn't be hardcoded
-    convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
-
-    size_t workspace_size;
-    checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						       input->tensor_desc,
-						       filter->filter_desc,
-						       convDesc,
-						       output->tensor_desc,
-						       convAlgo,
-						       &workspace_size));
-
-    // Allocating memory for the convolution workspace
-    void* workspace;
-    checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-    DEBUG("workspace size = %zu \n", workspace_size);
-
-
-    checkCUDNN(cudnnConvolutionForward(cudnnHandle, &alpha, input->tensor_desc,
-				       input->gpu_data, filter->filter_desc, filter->gpu_data,
-				       convDesc, convAlgo, workspace, workspace_size,
-				       &beta, output->tensor_desc, output->gpu_data));
-  }
-
-  cudaDeviceSynchronize();
-  profileEvent("Conv_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-
-  if (op_counter >= total_ops) {
-    ERROR("No accuracy flag found \n");
-  }
-
-  int op_acc = op_accuracies[op_counter];
-
-  // Skip errorInjection if explicitly requested
-  if (skip_tensors.find(op_counter) != skip_tensors.end()) {
-    op_acc = 0;
-  }
-
-  void* error_norms = tensorAddError(output, op_acc);
-  add_norms(error_norms, "tensorConv", op_acc);
-  add_conv_overheads(input, filter, vertical_stride, horizontal_stride, op_acc);
-
-  op_counter++;
-
-  #endif   
-
-  return output;
-
-
-}
-
-// FIXME: Need to properly fix the new HALF type conversion
-void* tensorHalfConvCutlass(void* input_ptr, void* filter_ptr,
-			    int vertical_pad, int horizontal_pad,
-			    int vertical_stride, int horizontal_stride,
-			    int conv_mode, int conv_groups){
-
-  INFO("*** TensorHConvolution \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  // NOTE: compute in half precision
-  cudnnDataType_t computeType = CUDNN_DATA_HALF;
-
-  // NOTE: Moving inputs to GPU global memory
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-
-  /***** CONVERSIONS from FP32 to FP16 - on the GPU */
-  size_t* input_dims = input->dims.dim_sizes;
-  size_t* filter_dims = filter->dims.dim_sizes;
-
-
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-  convertToFP16(filter);  
-
-  profileEvent("F2H_end");
-  /******* END OF INPUT DATA CONVERSIONS*/
-
-  
-  Tensor *output;
-  if(conv_groups > 1){
-    int n = input->dims.dim_sizes[0];
-    int c = input->dims.dim_sizes[1];
-    const int KH = filter->dims.dim_sizes[2];
-    const int KW = filter->dims.dim_sizes[3];
-    int h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-    int w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-    
-    DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-    
-
-    output = (Tensor*) create4DTensor((cudnnDataType_t) half_type, //input->data_type,
-				      CUDNN_TENSOR_NCHW, n, c, h, w);
-
-  
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-    int blockSize;
-    blockSize = 128;
-
-    dim3 grid(((n + 7)/ 8), (c * h * w + blockSize - 1)/ blockSize);
-    dim3 block(blockSize);
-    depthwise_convNew8_half2<<<grid, block>>> ((__half*) output->gpu_half_data,
-					      (__half*) input->gpu_half_data,
-					      (__half*) filter->gpu_half_data,
-					      input->dims.dim_sizes[0], input->dims.dim_sizes[1],
-					      input->dims.dim_sizes[2], input->dims.dim_sizes[3],
-					      KH, KW, h, w,
-					      vertical_pad, horizontal_pad,
-					      vertical_stride, horizontal_stride);
-    cudaDeviceSynchronize();
-
-    
-  }
-  else{    
-    checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-    //FIXME: Current hack to preserve backward compatibility
-    if(conv_groups == 0){
-      conv_groups = 1;
-    }
-  
-    // NOTE: Adding support for grouped convolution
-    checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  
-    checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					       vertical_pad, horizontal_pad, // conv padding
-					       vertical_stride, horizontal_stride, // conv strides
-					       1, 1, // upscaling values
-					       mode, // mode is configurable
-					       computeType)); // defines compute precision
-
-    int n, c, h, w; // output dimensions
-    // Find dimension of convolution output
-    checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						     input->tensor_half_desc,
-						     filter->filter_half_desc,
-						     &n, &c, &h, &w));
-    DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-
-    output = (Tensor*) create4DTensor((cudnnDataType_t) half_type, //input->data_type,
-				      CUDNN_TENSOR_NCHW, n, c, h, w);
-
-  
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-    DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, H = %d, W = %d, C = %d \n",
-	  output->data_type, output->data_format,
-	  output->dims.dim_sizes[0], output->dims.dim_sizes[1],
-	  output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-    if(convDesc == NULL || input->tensor_desc == NULL ||
-       filter->filter_desc == NULL || output->tensor_desc == NULL)
-      ERROR("NULL descriptor! \n");
-
-
-    // NOTE: The following algo works with TRUE half precision
-    convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-    //convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
-
-  
-    size_t workspace_size;
-    checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						       input->tensor_half_desc,
-						       filter->filter_half_desc,
-						       convDesc,
-						       output->tensor_half_desc,
-						       convAlgo,
-						       &workspace_size));
-
-    // Allocating memory for the convolution workspace
-    DEBUG("workspace size = %zu \n", workspace_size);
-    void* workspace;
-    checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-
-
-
-
-    checkCUDNN(cudnnConvolutionForward(cudnnHandle,
-				       &alpha,
-				       input->tensor_half_desc,
-				       input->gpu_half_data,
-				       filter->filter_half_desc,
-				       filter->gpu_half_data,
-				       convDesc, convAlgo, workspace, workspace_size,
-				       &beta,
-				       output->tensor_half_desc,
-				       output->gpu_half_data));
-
-  }
-  
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(output);
-  
-  profileEvent("H2F_end");
-
-
-
-  profileEvent("#Conv_end");
-
-
-  
-  return output;
-
-}
-
-void* tensorHalfConvCutlass2(void* input_ptr, void* filter_ptr,
-			     int vertical_pad, int horizontal_pad,
-			     int vertical_stride, int horizontal_stride,
-			     int conv_mode, int conv_groups){
-
-  INFO("*** TensorHConvolution \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  if (conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if (conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  // NOTE: compute in half precision
-  cudnnDataType_t computeType = CUDNN_DATA_HALF;
-
-  // NOTE: Moving inputs to GPU global memory
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-
-  /***** CONVERSIONS from FP32 to FP16 - on the GPU */
-  size_t* input_dims = input->dims.dim_sizes;
-  size_t* filter_dims = filter->dims.dim_sizes;
-
-
-  profileEvent("F2H_start");
-
-  Tensor* input_half = (Tensor*)create4DTensor(CUDNN_DATA_HALF, CUDNN_TENSOR_NCHW,
-					       input_dims[0], input_dims[1],
-					       input_dims[2], input_dims[3]);
-
-
-  changeTensorPlacement(input_half, DEVICE);
-  Tensor* filter_half = (Tensor*)create4DTensor(CUDNN_DATA_HALF, CUDNN_TENSOR_NCHW,
-						filter_dims[0], filter_dims[1],
-						filter_dims[2], filter_dims[3]);
-
-
-  changeTensorPlacement(filter_half, DEVICE);
-
-
-  f2h((float*)input->gpu_data, input->num_elems, (half*)input_half->gpu_data);
-  f2h((float*)filter->gpu_data, filter->num_elems, (half*)filter_half->gpu_data);
-
-
-  /******* END OF INPUT DATA CONVERSIONS*/
-  profileEvent("F2H_end");
-
-  Tensor* output;
-  Tensor* output_half;
-
-
-  if (conv_groups > 1 && horizontal_stride == 1 && vertical_stride == 1) {
-    int n = input->dims.dim_sizes[0];
-    int c = input->dims.dim_sizes[1];
-    const int KH = filter->dims.dim_sizes[2];
-    const int KW = filter->dims.dim_sizes[3];
-    int h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-    int w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-
-    DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-
-    output = (Tensor*)create4DTensor((cudnnDataType_t)input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w);
-    // FIXIT: more checks for data types needed
-    output_half = (Tensor*)create4DTensor(CUDNN_DATA_HALF,
-					  CUDNN_TENSOR_NCHW, n, c, h, w);
-
-
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-    int blockSize;
-    blockSize = 128;
-
-    dim3 grid(((n + 3) / 4), (c * h * w + blockSize - 1) / blockSize);
-    dim3 block(blockSize);
-    int C_dim = blockSize / (h * w) + 1 + 1;
-    int H_dim = blockSize % (h * w) / w + 1 + KH + 1;
-    int W_dim = blockSize % (h * w) % w + 1 + KW + 1;
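-    // Shared-memory tile extents: enough channels/rows/columns to cover every
-    // output the block computes, plus the KH x KW filter halo.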
-    depthwise_conv4_half3<<<grid, block, sizeof(__half) * C_dim * H_dim * W_dim>>>(
-        (__half*)output_half->gpu_data,
-        (__half*)input_half->gpu_data, (__half*)filter_half->gpu_data,
-        input->dims.dim_sizes[0], input->dims.dim_sizes[1],
-        input->dims.dim_sizes[2], input->dims.dim_sizes[3],
-        KH, KW, h, w,
-        vertical_pad, horizontal_pad, C_dim, H_dim, W_dim);
-    cudaDeviceSynchronize();
-
-
-  }
-  else {
-    checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-    //FIXME: Current hack to preserve backward compatibility
-    if (conv_groups == 0) {
-      conv_groups = 1;
-    }
-
-    // NOTE: Adding support for grouped convolution
-    checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-
-    // FIXIT: Think if upscaling values need to be configurable?
-    // IMP-FIXIT:  CUDNN Cross correlation is only used in the Lenet context
-    // IMP-FIXIT: Either make mode configurable OR see if CUDNN_CONVOLUTION MODE should be used?
-    checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					       vertical_pad, horizontal_pad, // conv padding
-					       vertical_stride, horizontal_stride, // conv strides
-					       1, 1, // upscaling values
-					       mode, // mode is configurable
-					       computeType)); // defines compute precision
-
-    int n, c, h, w; // output dimensions
-    // Find dimension of convolution output
-    checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     &n, &c, &h, &w));
-    DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-
-    output = (Tensor*)create4DTensor((cudnnDataType_t)input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w);
-    // FIXIT: more checks for data types needed
-    output_half = (Tensor*)create4DTensor(CUDNN_DATA_HALF,
-					  CUDNN_TENSOR_NCHW, n, c, h, w);
-
-
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-
-    DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, H = %d, W = %d, C = %d \n",
-	  output->data_type, output->data_format, output->dims.dim_sizes[0], output->dims.dim_sizes[1],
-	  output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-    if (convDesc == NULL || input->tensor_desc == NULL ||
-	filter->filter_desc == NULL || output->tensor_desc == NULL)
-      ERROR("NULL descriptor! \n");
-
-
-    // NOTE: The following algo works with TRUE half precision
-    convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-    //convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
-
-
-    size_t workspace_size;
-    checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						       input_half->tensor_desc,
-						       filter_half->filter_desc,
-						       convDesc,
-						       output_half->tensor_desc,
-						       convAlgo,
-						       &workspace_size));
-
-    // Allocating memory for the convolution workspace
-    DEBUG("workspace size = %zu \n", workspace_size);
-    void* workspace;
-    checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-
-
-
-
-    checkCUDNN(cudnnConvolutionForward(cudnnHandle,
-				       &alpha,
-				       input_half->tensor_desc,
-				       input_half->gpu_data,
-				       filter_half->filter_desc,
-				       filter_half->gpu_data,
-				       convDesc, convAlgo, workspace, workspace_size,
-				       &beta,
-				       output_half->tensor_desc,
-				       output_half->gpu_data));
-
-  }
-
-  profileEvent("H2F_start");
-
-  // NOTE: Transforming half precision output to single precision
-  h2f((half*)output_half->gpu_data, output->num_elems, (float*)output->gpu_data);
-
-  profileEvent("H2F_end");
-
-  profileEvent("#Conv_end");
-
-
-  freeTensor(input_half);
-  freeTensor(filter_half);
-  freeTensor(output_half);
-
-  return output;
-
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
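-// Restores columns skipped by perforation: even columns copy straight from
-// old_data, interior odd columns average their left/right neighbors, and the
-// last column replicates its left neighbor.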
-__global__
-void interpolateCol(int N, int old_w, int n, int c, int h, int w, float *old_data, float *new_data){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(col % 2 == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + col / 2];
-    else if(col == w - 1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-1) / 2];
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-1) / 2] +
-	 old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col+1) / 2])/2;
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
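-// Row analogue of interpolateCol: even rows copy, interior odd rows average
-// their vertical neighbors, and the last row replicates the row above.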
-__global__
-void interpolateRow(int N, int old_h, int n, int c, int h, int w, float *old_data, float *new_data){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(row % 2 == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row/2) * (w) + col];
-    else if(row == h - 1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row-1)/2 * (w) + col];
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(old_data[n * (c * old_h * w) + ch * (old_h * w) + (row -1)/2 * (w) + col] +
-	 old_data[n * (c * old_h * w) + ch * (old_h * w) + ((row+1) / 2) * (w) + col])/2;
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
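-// Generalized column restoration for a perforation factor of 'num': columns at
-// multiples of num hold computed values; the rest copy the nearest computed
-// column to the left (nearest-neighbor, not linear interpolation).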
-__global__
-void interpolateXCol(int N, int old_w, int n, int c, int h, int w,
-		     float *old_data, float *new_data, int num){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(col % num == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + col / num];
-    else{
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-(col%num)) / num];
-    }
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
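-// Row analogue of interpolateXCol: rows at multiples of 'num' are computed;
-// the rest copy the nearest computed row above.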
-__global__
-void interpolateXRow(int N, int old_h, int n, int c, int h, int w,
-		     float *old_data, float *new_data, int num){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(row % num == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row/num) * (w) + col];
-    else{
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row - (row % num))/num * (w) + col];
-    }
-
-  }
-}
-
-
-// Perforated Tensor Conv with 'perforation_rate' parameter
-void* tensorConvPerf(void* input_ptr, void* filter_ptr,
-		     int vertical_pad, int horizontal_pad,
-		     int vertical_stride, int horizontal_stride,
-		     int conv_mode, int conv_groups, int row, int col){
-  llvm_hpvm_initTensorRt(0);
-
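-  // Perforation: run cuDNN with strides inflated by (row, col), then the
-  // interpolate* kernels above restore the skipped rows/columns.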
-  INFO("*** TensorConvolution \n");
-  profileEvent("tensorConv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-
-  // TODO: Support other cases;
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  INFO("vertical_stride = %d, horizontal_stride = %d \n", vertical_stride, horizontal_stride);
-
-  checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-  //FIXME: Current hack to preserve backward compatibility
-  if(conv_groups == 0){
-    conv_groups = 1;
-  }
-
-  // NOTE: Adding support for grouped convolution
-  checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  int new_v = vertical_stride + row;
-  int new_h = horizontal_stride + col;
-  cudnnDataType_t computeType = CUDNN_DATA_FLOAT;
-  // FIXIT: Think if upscaling values need to be configurable?
-  // IMP-FIXIT: Either make mode configurable OR see if CUDNN_CONVOLUTION MODE should be used?
-  checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					     vertical_pad, horizontal_pad, // conv padding
-					     new_v, new_h, // conv strides
-					     1, 1, // upscaling values
-					     mode , // mode is configurable
-					     computeType)); // defines compute precision
-
-  int n, c, h, w; // output dimensions
-  // Find dimension of convolution output
-  checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   &n, &c, &h, &w));
-
-
-  DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-  Tensor* output;
-  if(input->data_format == CUDNN_TENSOR_NCHW)
-    output = (Tensor*) create4DTensor((cudnnDataType_t) input->data_type,
-				      CUDNN_TENSOR_NCHW, n, c, h, w);
-  else if(input->data_format == CUDNN_TENSOR_NHWC){
-    DEBUG("* NHWC Format \n");
-    output = (Tensor*) create4DTensor((cudnnDataType_t) input->data_type,
-				      CUDNN_TENSOR_NHWC, n, h, w, c);
-  }
-  else
-    ERROR("Unsupported Tensor Type");
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, C = %d, H = %d, W = %d \n",
-	output->data_type, output->data_format, output->dims.dim_sizes[0], output->dims.dim_sizes[1],
-	output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-  if(convDesc == NULL || input->tensor_desc == NULL ||
-     filter->filter_desc == NULL || output->tensor_desc == NULL)
-    ERROR("NULL descriptor! \n");
-
-
-  // Debugging info prints
-  printTensorDescInfo(input);
-  printTensorDescInfo(filter);
-  printTensorDescInfo(output);
-
-  // NOTE-FIXIT: function failing for NHWC formats - perhaps some CUDNN support is lacking
-  checkCUDNN(cudnnGetConvolutionForwardAlgorithm(cudnnHandle,
-						 input->tensor_desc,
-						 filter->filter_desc,
-						 convDesc,
-						 output->tensor_desc,
-						 CUDNN_CONVOLUTION_FWD_PREFER_FASTEST,
-						 //CUDNN_CONVOLUTION_FWD_NO_WORKSPACE,
-						 0,
-						 &convAlgo));
-
-
-  DEBUG("ConvAlgo = %d, FFT = %d, GEMM = %d, WINOGRAD = %d \n", convAlgo,
-	CUDNN_CONVOLUTION_FWD_ALGO_FFT, CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
-	CUDNN_CONVOLUTION_FWD_ALGO_WINOGRAD);
-
-
-  // FIXIT: Algo shouldn't be hardcoded
-  convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-
-  size_t workspace_size;
-  checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     convDesc,
-						     output->tensor_desc,
-						     convAlgo,
-						     &workspace_size));
-
-  // Allocating memory for the convolution workspace
-  void* workspace;
-  checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-  DEBUG("workspace size = %zu \n", workspace_size);
-
-
-  checkCUDNN(cudnnConvolutionForward(cudnnHandle, &alpha, input->tensor_desc,
-				     input->gpu_data, filter->filter_desc, filter->gpu_data,
-				     convDesc, convAlgo, workspace, workspace_size,
-				     &beta, output->tensor_desc, output->gpu_data));
-
-
-  int old_w = w;
-  int old_h = h;
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - filter->dims.dim_sizes[2]) / vertical_stride + 1;
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - filter->dims.dim_sizes[3]) / horizontal_stride + 1;
-
-  Tensor* new_output;
-  if(input->data_format == CUDNN_TENSOR_NCHW)
-    new_output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, //input->data_type,
-					  CUDNN_TENSOR_NCHW, n, c, h, w);
-  else if(input->data_format == CUDNN_TENSOR_NHWC){
-    DEBUG("* NHWC Format \n");
-    new_output = (Tensor*) create4DTensor((cudnnDataType_t) input->data_type,
-					  CUDNN_TENSOR_NHWC, n, h, w, c);
-  }
-  else
-    ERROR("Unsupported Tensor Type");
-
-
-  int numBlocks = (n * c * h * w  + 127) / 128;
-  if(row == 0 && col == 0) // no perforation requested; strides were unchanged
-    return output;
-
-  if(vertical_stride == 1 && row == 1){
-    interpolateRow<<<numBlocks,128>>>(n * c * h * w, old_h, n, c, h, w,
-				      (float *)output->gpu_data, (float *)new_output->gpu_data);
-  }
-  else if(horizontal_stride == 1 && col == 1){
-    interpolateCol<<<numBlocks,128>>>(n * c * h * w, old_w, n, c, h, w,
-				      (float *)output->gpu_data, (float *)new_output->gpu_data);
-  }
-  else if (col > 0){
-    interpolateXCol<<<numBlocks,128>>>(n * c * h * w, old_w, n, c, h, w,
-				       (float *)output->gpu_data, (float *)new_output->gpu_data, col + 1);
-  }
-  else{
-    interpolateXRow<<<numBlocks,128>>>(n * c * h * w, old_h, n, c, h, w,
-				       (float *)output->gpu_data, (float *)new_output->gpu_data, row + 1);
-  }
-
-
-  cudaDeviceSynchronize();
-
-  profileEvent("tensorConv_end", true);
-
-
-  changeTensorPlacement(new_output, DEVICE);
-  return new_output;
-
-}
-
-
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void interpolateColHalf(int N, int old_w, int n, int c, int h, int w, __half *old_data, __half *new_data){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(col % 2 == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + col / 2];
-    else if(col == w - 1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-1) / 2];
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	__hdiv(__hadd(old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-1) / 2],
-		      old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col+1) / 2]),2);
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void interpolateRowHalf(int N, int old_h, int n, int c, int h, int w, __half *old_data, __half *new_data){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(row % 2 == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row/2) * (w) + col];
-    else if(row == h - 1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row-1)/2 * (w) + col];
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	__hdiv(__hadd(old_data[n * (c * old_h * w) + ch * (old_h * w) + (row -1)/2 * (w) + col],
-		      old_data[n * (c * old_h * w) + ch * (old_h * w) + ((row+1) / 2) * (w) + col]), 2);
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void interpolateXColHalf(int N, int old_w, int n, int c, int h, int w,
-		     __half *old_data, __half *new_data, int num){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(col % num == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + col / num];
-    else{
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + (col-(col%num)) / num];
-    }
-
-  }
-}
-
-//N is new_data's size
-//n, c, h, w are the dimensions of new_data
-__global__
-void interpolateXRowHalf(int N, int old_h, int n, int c, int h, int w,
-		     __half *old_data, __half *new_data, int num){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-
-    if(row % num == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row/num) * (w) + col];
-    else{
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (row - (row % num))/num * (w) + col];
-    }
-
-  }
-}
-
-
-//produces N COL MAJOR matrices with H_out*W_out rows and reduced_filter_elem cols
-__global__ void convToGemmApprox(float * const __restrict__ output,
-				 const float * const __restrict input, const int N, const int C,
-				 const int H, const int W,
-				 const int KH, const int KW, const int V_pad,
-				 const int H_pad, const int H_out,
-				 const int W_out, const int V_stride,
-				 const int H_stride, const int reduced_filter_elem,
-				 const int skip_every) {
-  
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-	if(filter_elem_num % skip_every != skip_every-1) { //are we including this filter element?
-	  const int output_col = filter_elem_num - (filter_elem_num/skip_every); //calculate output column, taking skipping into account
-	  if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	    output[((n * reduced_filter_elem + output_col) * H_out + h) * W_out + w] = input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	  else
-	    output[((n * reduced_filter_elem + output_col) * H_out + h) * W_out + w] = 0;
-	}
-      }
-    }
-  }
-}
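-
-// Sizing sketch (illustrative): for C = 3 input channels and a 3x3 kernel,
-// num_filter_elem = 27; with skip_every = 3, every element whose index is
-// 2 mod 3 is dropped, so reduced_filter_elem = 27 - 27/3 = 18 columns remain
-// per output pixel.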
-
-
-//produces COL MAJOR matrix with reduced_filter_elem rows and NF cols
-__global__ void createReducedFilters(float * const __restrict__ output,
-				     const float * const __restrict input, const int NF,
-				     const int num_filter_elem, const int reduced_filter_elem,
-				     const int skip_every) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int fIdx = tx / num_filter_elem; //filter index
-  const int offset = tx % num_filter_elem; //offset within filter
-  if(fIdx < NF) { //is thread id within bounds?
-    if(offset % skip_every != skip_every-1) { //are we including this filter element?
-      const int output_row = offset - (offset/skip_every); //calculate output row, taking skipping into account
-      //scale kept weights by skip_every/(skip_every-1) to preserve the expected dot-product magnitude
-      output[fIdx*reduced_filter_elem + output_row] = (skip_every * 1.0f / (skip_every - 1)) * input[tx];
-    }
-  }
-}
-
-
-void* tensorConvolutionKernelSamp(void* input_ptr, void* filter_ptr,
-				  int vertical_pad, int horizontal_pad,
-				  int vertical_stride, int horizontal_stride,
-				  int conv_mode, int conv_groups, int skip_every){
-
-  INFO("*** TensorConvolution (w/ kernel sampling) \n");
-  profileEvent("Conv");
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  Tensor* output;
-  // TODO: Support other cases;
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-
-  convertToFP32(input);
-  convertToFP32(filter);
-  
-  
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  output = (Tensor*)create4DTensor((cudnnDataType_t)input->data_type,
-				   CUDNN_TENSOR_NCHW, n, c, h, w);
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-  // NOTE: Necessary to insert the above call for every output tensor
-  //total number of filter elem
-  const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-  //reduced number after skipping
-  const int reduced_filter_elem = num_filter_elem - (num_filter_elem/skip_every);
-  float * convData;
-  int convDataSize = sizeof(float) * n * reduced_filter_elem * h * w;
-  checkCudaErrors(cudaMalloc(&convData, convDataSize));
-  float * reducedFilter;
-  checkCudaErrors(cudaMalloc(&reducedFilter, sizeof(float) * c * reduced_filter_elem));
-  const int filtBlockSize = 128;
-  const int filtGridSize = (c * num_filter_elem + filtBlockSize - 1) / filtBlockSize;
-  createReducedFilters<<<filtGridSize, filtBlockSize>>>(reducedFilter,
-							(float *)filter->gpu_data, c,
-							num_filter_elem, reduced_filter_elem,
-							skip_every);
-  const int blockSize = 128;
-  const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-  convToGemmApprox<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					    input->dims.dim_sizes[1],
-					    input->dims.dim_sizes[2],
-					    input->dims.dim_sizes[3],
-					    KH, KW, vertical_pad, horizontal_pad, h, w,
-					    vertical_stride, horizontal_stride,
-					    reduced_filter_elem, skip_every);
-  checkCudaErrors(cudaDeviceSynchronize());
-  //Do the matrix multiplication. Want to multiply convData by filter->gpu_data[f * chan * KH * KW]
-  float alpha = 1.0f, beta = 0.0f;
-  checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					    CUBLAS_OP_N, CUBLAS_OP_N,
-					    h * w, c, reduced_filter_elem,
-					    &alpha,
-					    convData, h * w, reduced_filter_elem * h * w,
-					    reducedFilter, reduced_filter_elem, 0,
-					    &beta,
-					    (float *)output->gpu_data, h * w, c * h * w,
-					    n));
-  cudaFree(convData);
-  cudaFree(reducedFilter);
-  profileEvent("Conv_end", true);
-#ifdef ERROR_INJECTION_ENABLED
-  if (op_counter >= total_ops) {
-    ERROR("No accuracy flag found \n");
-  }
-  int op_acc = op_accuracies[op_counter];
-  // Skip errorInjection if explicitly requested
-  if (skip_tensors.find(op_counter) != skip_tensors.end()) {
-    op_acc = 0;
-  }
-  void* error_norms = tensorAddError(output, op_acc);
-  add_norms(error_norms, "tensorConv", op_acc);
-  add_conv_overheads(input, filter, vertical_stride, horizontal_stride, op_acc);
-  op_counter++;
-#endif
-  return output;
-}
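-
-// Minimal host-side usage sketch (assumptions: `in` and `kern` are NCHW
-// Tensors already registered with this runtime, and conv_mode = 1 is only
-// illustrative): sample the filters with skip_every = 3, dropping one of
-// every three filter elements before the GEMM.
-//
-//   void* out = tensorConvolutionKernelSamp(in, kern,
-//                                           /*vertical_pad=*/1, /*horizontal_pad=*/1,
-//                                           /*vertical_stride=*/1, /*horizontal_stride=*/1,
-//                                           /*conv_mode=*/1, /*conv_groups=*/1,
-//                                           /*skip_every=*/3);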
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques2.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques2.h
deleted file mode 100644
index 6042cc7dae94090fc554669975c6e28ea8ebdda0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approx_techniques2.h
+++ /dev/null
@@ -1,1673 +0,0 @@
-
-#include "tensor_utils.cu"
-
-
-//produces N COL MAJOR matrices with H_out*W_out rows and reduced_filter_elem cols
-__global__ void convToGemmApproxHalf(__half * const __restrict__ output,
-				     const __half * const __restrict input, const int N, const int C,
-				     const int H, const int W, const int KH,
-				     const int KW, const int V_pad,
-				     const int H_pad, const int H_out,
-				     const int W_out, const int V_stride,
-				     const int H_stride, const int reduced_filter_elem,
-				     const int skip_every) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-	if(filter_elem_num % skip_every != skip_every-1) { //are we including this filter element?
-	  const int output_col = filter_elem_num - (filter_elem_num/skip_every); //calculate output column, taking skipping into account
-	  if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	    output[((n * reduced_filter_elem + output_col) * H_out + h) * W_out + w] = input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	  else
-	    output[((n * reduced_filter_elem + output_col) * H_out + h) * W_out + w] = 0;
-	}
-      }
-    }
-  }
-}
-
-
-//This skips every xth row
-//H_eff is the number of rows calculated exactly
-__global__
-void convToGemmPerfRow(float * const __restrict__ output,
-		       const float * const __restrict input, const int N, const int C,
-		       const int H, const int W, const int KH, const int KW, const int V_pad,
-		       const int H_pad, const int H_out, const int W_out, const int V_stride,
-		       const int H_stride, const int x, const int start, const int H_eff){
-
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_eff * W_out); //output image number
-  const int c = tx % (C * H_eff * W_out) / (H_eff * W_out); //output chan number
-  const int h = tx % (H_eff * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  int past_start = (h % (x - 1) >= (x - 1 - start));
-  const int inH = (h / (x - 1) * x + h % (x-1) +
-		   past_start) * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-
-	if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	  output[((n * C * KH * KW + filter_elem_num) * H_eff + h) * W_out + w] =
-	    input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	else
-	  output[((n * C * KH * KW + filter_elem_num) * H_eff + h) * W_out + w] = 0;
-
-      }
-    }
-  }
-
-}
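-
-// Perforation geometry, worked example (illustrative): with h = 10 output
-// rows, x = row = 3 and start = 0, the rows with row % 3 == 2 are skipped,
-// so h_eff = 10 - 10/3 = 7 rows are computed exactly and the remaining 3 are
-// interpolated afterwards.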
-
-
-//For use in tensorConvPerfCuda
-//Interpolates every xth row starting from x - 1 - start
-//N is total number of elements in final output array
-__global__
-void approxInterpolateRow(int N, int old_h, int n, int c, int h, int w,
-			  float *old_data, float *new_data, int x, int start){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-    int past_start = ((row % x) >= (x - 1 - start));
-
-    if(row == h-1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + (old_h - 1) * (w) + col];
-    else if (row == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) + 0 * (w) + col];
-    else if(row % x == x - 1 - start){
-      int past_startO = ((row - 1) % x) > (x - 1 - start);
-      int oldIdx1 = n * (c * old_h * w) + ch * (old_h * w) +
-	((x-1) * ((row - 1) / x) + (row-1) % x - past_startO) * (w) + col;
-
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(old_data[oldIdx1] + old_data[oldIdx1 + 1 * w]) / 2;
-    }
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * old_h * w) + ch * (old_h * w) +
-		 ((x-1) * (row / x) + row % x - past_start )  * (w) + col];
-
-
-  }
-
-}
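-
-// Worked example (illustrative, x = 3, start = 0): rows 2, 5, 8, ... satisfy
-// row % x == x - 1 - start and are reconstructed as the average of the two
-// neighbouring computed rows; every other row is copied from old_data, with
-// rows 0 and h-1 clamped to the nearest computed row.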
-
-
-//This skips every xth col
-//W_eff is the number of cols calculated exactly
-__global__
-void convToGemmPerfCol(float * const __restrict__ output,
-		       const float * const __restrict input, const int N, const int C,
-		       const int H, const int W, const int KH, const int KW, const int V_pad,
-		       const int H_pad, const int H_out, const int W_out, const int V_stride,
-		       const int H_stride, const int x, const int start, const int W_eff){
-
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_eff); //output image number
-  const int c = tx % (C * H_out * W_eff) / (H_out * W_eff); //output chan number
-  const int h = tx % (H_out * W_eff) / W_eff; //output height index (row number)
-  const int w = tx % W_eff; //output width index (col number)
-  int past_start = (w % (x - 1)) >= (x - 1 - start);
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = (w / (x - 1) * x + w % (x-1) +
-		   past_start) * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-
-	if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	  output[((n * C * KH * KW + filter_elem_num) * H_out + h) * W_eff + w] =
-	    input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	else
-	  output[((n * C * KH * KW + filter_elem_num) * H_out + h) * W_eff + w] = 0;
-
-      }
-    }
-  }
-
-}
-
-
-//For use in tensorConvPerfCuda
-//Interpolates every xth col starting from x - 1 - start
-//N is total number of elements in final output array
-__global__
-void approxInterpolateCol(int N, int old_w, int n, int c, int h, int w,
-			  float *old_data, float *new_data, int x, int start){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-    int past_start = ((col % x) >= (x - 1 - start));
-
-    if(col == w-1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w) + old_w - 1];
-    else if (col == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * (old_w)];
-    else if(col % x == x - 1 - start){
-      int past_startO = ((col - 1) % x) > (x - 1 - start);
-      int oldIdx1 = n * (c * h * old_w) + ch * (h * old_w) + row * old_w +
-	((x-1) * ((col - 1) / x) + (col-1) % x - past_startO);
-
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	(old_data[oldIdx1] + old_data[oldIdx1 + 1]) / 2;
-    }
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[n * (c * h * old_w) + ch * (h * old_w) + row * old_w +
-		 ((x-1) * (col / x) + col % x - past_start)];
-
-  }
-
-}
-
-
-
-//start has to be less than row or less than col
-//row and col have to be >= 0
-//row = col = 1 means no perforation
-void* tensorConvPerfCuda(void* input_ptr, void* filter_ptr,
-			 int vertical_pad, int horizontal_pad, int vertical_stride,
-			 int horizontal_stride, int conv_mode, int conv_groups,
-			 int row, int col, int start){
-
-  INFO("*** TensorConvolution (output perforation) \n");
-  profileEvent("Conv");
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-  
-  Tensor* output;
-  // TODO: Support other cases;
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  profileEvent("H2F_start");
-  convertToFP32(input);
-  convertToFP32(filter);
-  profileEvent("H2F_end");
-  
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  int h_eff = h - h / row;
-  if(h % row > row - 1 - start)
-    h_eff = h_eff - 1;
-
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  int w_eff = w - w / col;
-  if(w % col > col - 1 - start)
-    w_eff = w_eff - 1;
-
-
-  Tensor *new_output;
-  if(row > 1){
-    output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h_eff, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    float * convData;
-    int convDataSize = sizeof(float) * n * num_filter_elem * h_eff * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h_eff * w + blockSize - 1) / blockSize;
-
-    convToGemmPerfRow<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					       input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-					       input->dims.dim_sizes[3], KH, KW, vertical_pad,
-					       horizontal_pad, h, w,
-					       vertical_stride, horizontal_stride, row, start, h_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    float alpha = 1.0f, beta = 0.0f;
-    checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					      CUBLAS_OP_N, CUBLAS_OP_N,
-					      h_eff * w, c, num_filter_elem,
-					      &alpha,
-					      convData, h_eff * w, num_filter_elem * h_eff * w,
-					      (float *)filter->gpu_data, num_filter_elem, 0,
-					      &beta,
-					      (float *)output->gpu_data, h_eff * w, c * h_eff * w,
-					      n));
-
-    new_output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-					 CUDNN_TENSOR_NCHW, n, c, h, w);
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(new_output, DEVICE);
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 127) / 128;
-    approxInterpolateRow<<<numBlocks,128>>>(n * c * h * w, h_eff, n, c, h, w,
-					    (float *) output->gpu_data, (float *) new_output->gpu_data,
-					    row, start);
-    cudaDeviceSynchronize();
-
-    freeTensor(output);
-    cudaFree(convData);
-  }
-  else if(col > 1){
-    
-    output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, //input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w_eff);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    float * convData;
-    int convDataSize = sizeof(float) * n * num_filter_elem * h * w_eff;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w_eff + blockSize - 1) / blockSize;
-
-    convToGemmPerfCol<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					       input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-					       input->dims.dim_sizes[3], KH, KW,
-					       vertical_pad, horizontal_pad, h, w,
-					       vertical_stride, horizontal_stride, col, start, w_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    float alpha = 1.0f, beta = 0.0f;
-    checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					      CUBLAS_OP_N, CUBLAS_OP_N,
-					      h * w_eff, c, num_filter_elem,
-					      &alpha,
-					      convData, h * w_eff, num_filter_elem * h * w_eff,
-					      (float *)filter->gpu_data, num_filter_elem, 0,
-					      &beta,
-					      (float *)output->gpu_data, h * w_eff, c * h * w_eff,
-					      n));
-
-    new_output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-					 CUDNN_TENSOR_NCHW, n, c, h, w);
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(new_output, DEVICE);
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 127) / 128;
-    approxInterpolateCol<<<numBlocks,128>>>(n * c * h * w, w_eff, n, c, h, w,
-					    (float *)output->gpu_data, (float *)new_output->gpu_data,
-					    col, start);
-    cudaDeviceSynchronize();
-
-    freeTensor(output);
-    cudaFree(convData);
-  }
-  else{
-    output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    float * convData;
-    int convDataSize = sizeof(float) * n * num_filter_elem * h * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-    convToGemmApprox<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					      input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-					      input->dims.dim_sizes[3], KH, KW,
-					      vertical_pad, horizontal_pad, h, w,
-					      vertical_stride, horizontal_stride,
-					      num_filter_elem, c * h * w);
-    checkCudaErrors(cudaDeviceSynchronize());
-    //Do the matrix multiplication. Want to multiply convData by filter->gpu_data[f * chan * KH * KW]
-    float alpha = 1.0f, beta = 0.0f;
-    checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					      CUBLAS_OP_N, CUBLAS_OP_N,
-					      h * w, c, num_filter_elem,
-					      &alpha,
-					      convData, h * w, num_filter_elem * h * w,
-					      (float *)filter->gpu_data, num_filter_elem, 0,
-					      &beta,
-					      (float *)output->gpu_data, h * w, c * h * w,
-					      n));
-
-    new_output = output;
-    cudaFree(convData);
-  }
-
-
-  profileEvent("Conv_end"); //, true);
- 
-  
-  return new_output;
-}
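-
-// Minimal host-side usage sketch (assumptions: `in` and `kern` are NCHW
-// Tensors already registered with this runtime, and conv_mode = 1 is only
-// illustrative): perforate every 2nd output row and interpolate it back.
-//
-//   void* out = tensorConvPerfCuda(in, kern,
-//                                  /*vertical_pad=*/1, /*horizontal_pad=*/1,
-//                                  /*vertical_stride=*/1, /*horizontal_stride=*/1,
-//                                  /*conv_mode=*/1, /*conv_groups=*/1,
-//                                  /*row=*/2, /*col=*/1, /*start=*/0);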
-
-__global__
-void convToGemmPerfRowHalf(__half * const __restrict__ output,
-			   const __half * const __restrict input, const int N, const int C,
-			   const int H, const int W, const int KH, const int KW, const int V_pad,
-			   const int H_pad, const int H_out, const int W_out, const int V_stride,
-			   const int H_stride, const int x, const int start, const int H_eff){
-
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_eff * W_out); //output image number
-  const int c = tx % (C * H_eff * W_out) / (H_eff * W_out); //output chan number
-  const int h = tx % (H_eff * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  int past_start = (h % (x - 1) >= (x - 1 - start));
-  const int inH = (h / (x - 1) * x + h % (x-1) +
-		   past_start) * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-
-	if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	  output[((filter_elem_num * N + n) * H_eff + h) * W_out + w] =
-	    input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	else
-	  output[((filter_elem_num * N + n) * H_eff + h) * W_out + w] = 0;
-
-      }
-    }
-  }
-
-}
-
-
-//For use in tensorConvPerfCuda
-//Interpolates every xth row starting from x - 1 - start
-//N is total number of elements in final output array
-__global__
-void approxInterpolateRowHalf(int N, int old_h, int b, int c, int h, int w,
-			      __half *old_data, __half *new_data, int x, int start){
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-    int past_start = ((row % x) >= (x - 1 - start));
-
-    if(row == h-1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * old_h * w) + n * (old_h * w) + (old_h - 1) * (w) + col];
-    else if (row == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * old_h * w) + n * (old_h * w) + 0 * (w) + col];
-    else if(row % x == x - 1 - start){
-      int past_startO = ((row - 1) % x) > (x - 1 - start);
-      int oldIdx1 = ch * (b * old_h * w) + n * (old_h * w) +
-	((x-1) * ((row - 1) / x) + (row-1) % x - past_startO) * (w) + col;
-
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	__hdiv(__hadd(old_data[oldIdx1], old_data[oldIdx1 + 1 * w]), 2);
-    }
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * old_h * w) + n * (old_h * w) +
-		 ((x-1) * (row / x) + row % x - past_start )  * (w) + col];
-
-
-  }
-
-}
-
-
-//This skips every xth col
-//W_eff is the number of cols calculated exactly
-__global__
-void convToGemmPerfColHalf(__half * const __restrict__ output,
-			   const __half * const __restrict input, const int N, const int C,
-			   const int H, const int W, const int KH, const int KW, const int V_pad,
-			   const int H_pad, const int H_out, const int W_out, const int V_stride,
-			   const int H_stride, const int x, const int start, const int W_eff){
-
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_eff); //output image number
-  const int c = tx % (C * H_out * W_eff) / (H_out * W_eff); //output chan number
-  const int h = tx % (H_out * W_eff) / W_eff; //output height index (row number)
-  const int w = tx % W_eff; //output width index (col number)
-  int past_start = (w % (x - 1)) >= (x - 1 - start);
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = (w / (x - 1) * x + w % (x-1) +
-		   past_start) * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-
-	if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	  output[((filter_elem_num * N + n) * H_out + h) * W_eff + w] =
-	    input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	else
-	  output[((filter_elem_num * N + n) * H_out + h) * W_eff + w] = 0;
-
-      }
-    }
-  }
-
-}
-
-
-//For use in tensorConvPerfCuda
-//Interpolates every xth col starting from x - 1 - start
-//N is total number of elements in final output array
-__global__
-void approxInterpolateColHalf(int N, int old_w, int b, int c, int h, int w,
-			      __half *old_data, __half *new_data, int x, int start){
-
-
-  int index = blockIdx.x * blockDim.x + threadIdx.x;
-  int stride = blockDim.x * gridDim.x;
-
-  for(int i = index; i < N; i += stride){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n = i / (c * h * w);
-    int past_start = ((col % x) >= (x - 1 - start));
-
-    if(col == w-1)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * h * old_w) + n * (h * old_w) + row * (old_w) + old_w - 1];
-    else if (col == 0)
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * h * old_w) + n * (h * old_w) + row * (old_w)];
-    else if(col % x == x - 1 - start){
-      int past_startO = ((col - 1) % x) > (x - 1 - start);
-      int oldIdx1 = ch * (b * h * old_w) + n * (h * old_w) + row * old_w +
-	((x-1) * ((col - 1) / x) + (col-1) % x - past_startO);
-
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	__hdiv(__hadd(old_data[oldIdx1], old_data[oldIdx1 + 1]), 2);
-    }
-    else
-      new_data[n * (c * h * w) + ch * (h * w) + row * (w) + col] =
-	old_data[ch * (b * h * old_w) + n * (h * old_w) + row * old_w +
-		 ((x-1) * (col / x) + col % x - past_start)];
-
-  } 
-}
-
-__global__
-void switchMatrix(int N, int n, int c, int h, int w, __half *old_data, __half *new_data){
-
-  int i = blockIdx.x * blockDim.x + threadIdx.x;
-  if(i < N){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n_new = i / (c * h * w);
-
-    new_data[((n_new * c + ch) * h + row ) * w + col] =
-      old_data[((ch * n + n_new) * h + row ) * w + col];
-  }
-
-}
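-
-// Layout note (illustrative): the fused half-precision GEMMs in this file
-// emit activations filter-major, old_data[ch][n][h][w]; switchMatrix permutes
-// one element per thread back to the runtime's standard NCHW layout,
-// new_data[n][ch][h][w].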
-
-
-//produces N COL MAJOR matrices with H_out*W_out rows and reduced_filter_elem cols
-__global__ void convToGemmApproxHalfN(__half * const __restrict__ output,
-				     const __half * const __restrict input, const int N, const int C,
-				     const int H, const int W, const int KH, const int KW, const int V_pad,
-				     const int H_pad, const int H_out, const int W_out, const int V_stride,
-				     const int H_stride, const int reduced_filter_elem,
-				     const int skip_every) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-	const int output_col = filter_elem_num; //no skipping in this variant: the column is just the filter element index
-	if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	  output[((output_col * N + n) * H_out + h) * W_out + w] =
-	    input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	else
-	  output[((output_col * N + n) * H_out + h) * W_out + w] = 0;
-
-      }
-    }
-  }
-}
-
-//start has to be less than row or less than col
-//row and col have to be >= 0
-//row = col = 1 means no perforation
-void* tensorConvPerfCudaHalf(void* input_ptr, void* filter_ptr,
-			     int vertical_pad, int horizontal_pad, int vertical_stride,
-			     int horizontal_stride, int conv_mode, int conv_groups,
-			     int row, int col, int start){
-
-  INFO("*** TensorConvolution half perforation \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  profileEvent("F2H_start");
-  convertToFP16(input);
-  convertToFP16(filter);
-  profileEvent("F2H_end");
-
-  Tensor* output_half;
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  int h_eff = h - h / row;
-  if(h % row > row - 1 - start)
-    h_eff = h_eff - 1;
-
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  int w_eff = w - w / col;
-  if(w % col > col - 1 - start)
-    w_eff = w_eff - 1;
-
-
-  Tensor *new_output;
-  if(row > 1){
-    output_half = (Tensor*)create4DTensor((cudnnDataType_t) half_type, CUDNN_TENSOR_NCHW,
-					  n, c, h_eff, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output_half, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * num_filter_elem * h_eff * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h_eff * w + blockSize - 1) / blockSize;
-
-    convToGemmPerfRowHalf<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3], KH, KW, vertical_pad,
-						   horizontal_pad, h, w,
-						   vertical_stride, horizontal_stride, row, start, h_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h_eff * w, c, num_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h_eff * w,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F, num_filter_elem,
-				 beta_half,
-				 (__half*) output_half->gpu_half_data, CUDA_R_16F, n * h_eff * w,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-    
-    new_output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					 CUDNN_TENSOR_NCHW, n, c, h, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(new_output, DEVICE);
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    approxInterpolateRowHalf<<<numBlocks,256>>>(n * c * h * w, h_eff, n, c, h, w,
-						(__half *)output_half->gpu_half_data,
-						(__half *)new_output->gpu_half_data,
-						row, start);
-    cudaDeviceSynchronize();
-
-    freeTensor(output_half);
-    cudaFree(convData);
-  }
-  else if(col > 1){
-    output_half = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					  CUDNN_TENSOR_NCHW, n, c, h, w_eff);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output_half, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * num_filter_elem * h * w_eff;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w_eff + blockSize - 1) / blockSize;
-
-    convToGemmPerfColHalf<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3], KH, KW, vertical_pad,
-						   horizontal_pad, h, w,
-						   vertical_stride, horizontal_stride, col, start, w_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-
-    
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h * w_eff, c, num_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h * w_eff,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F, num_filter_elem,
-				 beta_half,
-				 (__half*) output_half->gpu_half_data, CUDA_R_16F, n * h * w_eff,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-    
-    new_output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					 CUDNN_TENSOR_NCHW, n, c, h, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(new_output, DEVICE);
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    approxInterpolateColHalf<<<numBlocks,256>>>(n * c * h * w, w_eff, n, c, h, w,
-						(__half *)output_half->gpu_half_data,
-						(__half *)new_output->gpu_half_data,
-						col, start);
-    
-    cudaDeviceSynchronize();
-
-    freeTensor(output_half);
-    cudaFree(convData);
-
-  }
-  else{
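-    // The fused GEMM below writes a filter-major [c][n][h][w] layout, so this
-    // temporary deliberately swaps c and n; switchMatrix restores NCHW below.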
-    output_half = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					  CUDNN_TENSOR_NCHW, c, n, h, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output_half, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * num_filter_elem * h * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-    convToGemmApproxHalfN<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3], KH, KW,
-						   vertical_pad, horizontal_pad, h, w,
-						   vertical_stride, horizontal_stride,
-						   num_filter_elem, c * h * w);
-    checkCudaErrors(cudaDeviceSynchronize());
-    //Do the matrix multiplication. Want to multiply convData by filter->gpu_data[f * chan * KH * KW]
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h * w, c, num_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h * w,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F, num_filter_elem,
-				 beta_half,
-				 (__half*) output_half->gpu_half_data, CUDA_R_16F, n * h * w,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-
-    new_output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					  CUDNN_TENSOR_NCHW, n, c, h, w);
-    changeTensorPlacement(new_output, DEVICE);
-
-    
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    switchMatrix<<<numBlocks,256>>>(n * c * h * w, n, c, h, w,
-				    (__half *)output_half->gpu_half_data,
-				    (__half *)new_output->gpu_half_data);
-
-    checkCudaErrors(cudaDeviceSynchronize());
-    
-    cudaFree(convData);
-    freeTensor(output_half);
-  }
-
-
-  profileEvent("H2F_start");
-  convertToFP32_offline(new_output);
-  profileEvent("H2F_end");
-
-  profileEvent("#Conv_end"); //, true);
-
-  return new_output;
-}
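-
-// Design note: the FP32 path above uses one cublasSgemmStridedBatched call
-// with an (h*w x c) GEMM per image, while this half path fuses all n images
-// into a single cublasGemmEx with n*h*w rows; the fused result is therefore
-// filter-major and must be permuted back to NCHW (switchMatrix) or
-// re-interleaved by the interpolation kernels.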
-
-
-//produces COL MAJOR matrix with reduced_filter_elem rows and NF cols
-__global__ void createReducedFiltersHalf(__half * output,
-					 const __half * const __restrict input, const int NF,
-					 const int num_filter_elem, const int reduced_filter_elem,
-					 const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int fIdx = tx / num_filter_elem; //filter index
-  const int offset = tx % num_filter_elem; //offset within filter
-  if(fIdx < NF) { //is thread id within bounds?
-    if(offset % skip_every != skip_every-1-skip_offset) { //are we including this filter element?
-      const int output_row = offset - ((offset + skip_every)/skip_every); //compacted row index after dropping the skipped elements
-      output[fIdx*reduced_filter_elem + output_row] =
-	__hmul((skip_every * 1.0 / (skip_every - 1)), input[tx]);
-    }
-  }
-}
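-
-// Scaling sketch (illustrative): with skip_every = 3 and skip_offset = 0, one
-// of every three filter elements is dropped, so the surviving weights are
-// scaled by skip_every / (skip_every - 1) = 1.5 to keep the expected
-// magnitude of the filter dot products roughly unchanged.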
-
-
-//COL Major matrix with N*H*W columns and reduced_filter_elem rows
-//skip_every = 1 means no perforation
-__global__ void convToGemmHalfInput(__half * const __restrict__ output,
-				    const __half * const __restrict input,
-				    const int N, const int C,
-				    const int H, const int W,
-				    const int KH, const int KW, const int V_pad,
-				    const int H_pad, const int H_out,
-				    const int W_out, const int V_stride,
-				    const int H_stride, const int reduced_filter_elem,
-				    const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-	
-	if(filter_elem_num % skip_every != skip_every-1-skip_offset) {
-	  int output_col = filter_elem_num -
-	    ((filter_elem_num + skip_every)/skip_every);
-	  if(skip_every == 1)
-	    output_col = filter_elem_num;
-	  if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	    output[((output_col*N + n) * H_out + h) * W_out + w] =
-	      input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	  else
-	    output[((output_col*N + n) * H_out + h) * W_out + w] = 0;
-	}
-      }
-    }
-  }
-}
-
-
-//COL Major matrix with N*H*W columns and reduced_filter_elem rows
-//Can only be used when skipping every other element in input sampling
-__global__ void convToGemmHalfInput2(__half * const __restrict__ output,
-				     const __half * const __restrict input,
-				     const int N, const int C,
-				     const int H, const int W,
-				     const int KH, const int KW, const int V_pad,
-				     const int H_pad, const int H_out,
-				     const int W_out, const int V_stride,
-				     const int H_stride, const int reduced_filter_elem,
-				     const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    const int filter_elem_num = c * KH * KW;
-    for(int l = (filter_elem_num % 2) + skip_offset; l < KH * KW; l+=2) {
-      int i = l / KW;
-      int j = l % KW;
-
-      const int new_idx = filter_elem_num + i * KW + j;
-      const int output_col = new_idx - ((new_idx + skip_every)/2); //new output column
-      if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	output[((output_col*N + n) * H_out + h) * W_out + w] =
-	  input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-      else
-	output[((output_col*N + n) * H_out + h) * W_out + w] = 0;
-
-    }
-  }
-}
-
-//Baseline (no input sampling) when skip_offset == skip_every (e.g., both 1)
-void* tensorConvInputHalf(void* input_ptr, void* filter_ptr,
-			  int vertical_pad, int horizontal_pad, int vertical_stride,
-			  int horizontal_stride, int conv_mode, int conv_groups,
-			  int skip_every, int skip_offset){
-
-  INFO("*** TensorHConvolution input sampling \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  profileEvent("F2H_start");
-  convertToFP16(input);
-  convertToFP16(filter);
-  profileEvent("F2H_end");
-
-  Tensor* output;
-  Tensor* new_output;
-  // TODO: Support other cases;
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-				   CUDNN_TENSOR_NCHW, n, c, h, w);
-  new_output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-				       CUDNN_TENSOR_NCHW, n, c, h, w); 
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-  changeTensorPlacement(new_output, DEVICE);
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  //total number of filter elem
-  const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-  //reduced number after skipping
-  int reduced_filter_elem;
-  if(skip_offset != skip_every){
-    reduced_filter_elem = num_filter_elem - (num_filter_elem/skip_every);
-    if(num_filter_elem % skip_every > skip_every - 1 - skip_offset)
-      reduced_filter_elem = reduced_filter_elem - 1;
-  }
-  else
-    reduced_filter_elem = num_filter_elem;
-  
-  __half * convData;
-  int convDataSize = sizeof(__half) * n * reduced_filter_elem * h * w;
-  checkCudaErrors(cudaMalloc(&convData, convDataSize));
-  __half * reducedFilter;
-  checkCudaErrors(cudaMalloc(&reducedFilter, sizeof(__half) * c * reduced_filter_elem));
-  const int filtBlockSize = 128;
-  const int filtGridSize = (c * num_filter_elem + filtBlockSize - 1) / filtBlockSize;
-  if(skip_offset != skip_every)
-    createReducedFiltersHalf<<<filtGridSize, filtBlockSize>>>(reducedFilter,
-							      (__half *)filter->gpu_half_data,
-							      c, num_filter_elem, reduced_filter_elem,
-							      skip_every, skip_offset);
-  checkCudaErrors(cudaDeviceSynchronize());
-
-  const int blockSize = 256;
-  const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-  if(skip_every == 2){
-    convToGemmHalfInput2<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						  input->dims.dim_sizes[1],
-						  input->dims.dim_sizes[2],
-						  input->dims.dim_sizes[3],
-						  KH, KW, vertical_pad, horizontal_pad,
-						  h, w, vertical_stride, horizontal_stride,
-						  reduced_filter_elem, skip_every,
-						  skip_offset);
-  }
-  else{
-    convToGemmHalfInput<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						 input->dims.dim_sizes[1],
-						 input->dims.dim_sizes[2],
-						 input->dims.dim_sizes[3],
-						 KH, KW, vertical_pad, horizontal_pad,
-						 h, w, vertical_stride, horizontal_stride,
-						 reduced_filter_elem, skip_every,
-						 skip_offset);
-  }
- 
-  checkCudaErrors(cudaDeviceSynchronize());
-  //Do the matrix multiplication. Want to multiply convData by filter->gpu_data[f * chan * KH * KW]
-  const __half alf = approx_float_to_half(1.0);
-  const __half bet = approx_float_to_half(0.0);
-  const __half *alpha_half = &alf;
-  const __half *beta_half = &bet;
-
-  if(skip_offset != skip_every)
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h * w, c, reduced_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h * w,
-				 reducedFilter, CUDA_R_16F, reduced_filter_elem,
-				 beta_half,
-				 (__half*) output->gpu_half_data, CUDA_R_16F, n * h * w,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-  else
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h * w, c, reduced_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h * w,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F,
-				 reduced_filter_elem,
-				 beta_half,
-				 (__half*) output->gpu_half_data, CUDA_R_16F, n * h * w,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-
-  int numBlocks = (n * c * h * w  + 255) / 256;
-  switchMatrix<<<numBlocks,256>>>(n * c * h * w, n, c, h, w,
-				  (__half *)output->gpu_half_data,
-				  (__half *)new_output->gpu_half_data);
-
-  checkCudaErrors(cudaDeviceSynchronize());
-
-  cudaFree(convData);
-  cudaFree(reducedFilter);
-  freeTensor(output);
-
-  profileEvent("H2F_start");
-
-  // NOTE: Transforming half precision output to single precision
-  convertToFP32_offline(new_output);
-
-  profileEvent("H2F_end");
-
-  profileEvent("#Conv_end", true);
-
-  return new_output;
-
-}
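-
-// Minimal host-side usage sketch (assumptions: `in` and `kern` are NCHW
-// Tensors already registered with this runtime, and conv_mode = 1 is only
-// illustrative): sample the inputs, dropping one of every two filter
-// elements starting at offset 0.
-//
-//   void* out = tensorConvInputHalf(in, kern,
-//                                   /*vertical_pad=*/1, /*horizontal_pad=*/1,
-//                                   /*vertical_stride=*/1, /*horizontal_stride=*/1,
-//                                   /*conv_mode=*/1, /*conv_groups=*/1,
-//                                   /*skip_every=*/2, /*skip_offset=*/0);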
-
-//COL Major matrix with N*H*W columns and reduced_filter_elem rows
-//skip_every = 1 means no perforation
-__global__ void convToGemmFullInput(float * const __restrict__ output,
-				    const float * const __restrict input,
-				    const int N, const int C,
-				    const int H, const int W,
-				    const int KH, const int KW, const int V_pad,
-				    const int H_pad, const int H_out,
-				    const int W_out, const int V_stride,
-				    const int H_stride, const int reduced_filter_elem,
-				    const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    for(int i = 0; i < KH; i++) {
-      for(int j = 0; j < KW; j++) {
-	const int filter_elem_num = (c * KH + i) * KW + j; //index of this filter element
-
-	if(filter_elem_num % skip_every != skip_every-1-skip_offset) {
-	    int output_col = filter_elem_num -
-	      ((filter_elem_num + skip_every)/skip_every);
-	    if(skip_every == 1)
-	      output_col = filter_elem_num;
-	    if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	      output[((output_col*N + n) * H_out + h) * W_out + w] =
-		input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-	    else
-	      output[((output_col*N + n) * H_out + h) * W_out + w] = 0;
-	}
-      }
-    }
-  }
-}
-
-
-//COL Major matrix with N*H*W columns and reduced_filter_elem rows
-//Can only be used when skipping every other element in input sampling
-__global__ void convToGemmFullInput2(float * const __restrict__ output,
-				     const float * const __restrict input,
-				     const int N, const int C,
-				     const int H, const int W,
-				     const int KH, const int KW, const int V_pad,
-				     const int H_pad, const int H_out,
-				     const int W_out, const int V_stride,
-				     const int H_stride, const int reduced_filter_elem,
-				     const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int n = tx / (C * H_out * W_out); //output image number
-  const int c = tx % (C * H_out * W_out) / (H_out * W_out); //output chan number
-  const int h = tx % (H_out * W_out) / W_out; //output height index (row number)
-  const int w = tx % W_out; //output width index (col number)
-  const int inH = h * V_stride - V_pad; //input height index (row number)
-  const int inW = w * H_stride - H_pad; //input width index (col number)
-  if(n < N) { //is thread id within bounds?
-    const int filter_elem_num = c * KH * KW;
-    for(int l = (filter_elem_num % 2) + skip_offset; l < KH * KW; l+=2) {
-      int i = l / KW;
-      int j = l % KW;
-
-      const int new_idx = filter_elem_num + i * KW + j;
-      const int output_col = new_idx - ((new_idx + skip_every)/2); //new output column
-      if(inH + i >= 0 && inH + i < H && inW + j >= 0 && inW + j < W)
-	output[((output_col*N + n) * H_out + h) * W_out + w] =
-	  input[((n * C + c) * H + (inH + i)) * W + (inW + j)];
-      else
-	output[((output_col*N + n) * H_out + h) * W_out + w] = 0;
-
-    }
-  }
-}
-
-
-//produces COL MAJOR matrix with reduced_filter_elem rows and NF cols
-__global__ void createReducedFiltersFull(float * output,
-					 const float * const __restrict input, const int NF,
-					 const int num_filter_elem, const int reduced_filter_elem,
-					 const int skip_every, const int skip_offset) {
-  const int tx = blockDim.x * blockIdx.x + threadIdx.x; //thread id
-  const int fIdx = tx / num_filter_elem; //filter index
-  const int offset = tx % num_filter_elem; //offset within filter
-  if(fIdx < NF) { //is thread id within bounds?
-    if(offset % skip_every != skip_every-1-skip_offset) { //are we including this filter element?
-      const int output_row = offset - ((offset + skip_every)/skip_every); //compacted row index after dropping the skipped elements
-            output[fIdx*reduced_filter_elem + output_row] =
-	      (skip_every * 1.0 / (skip_every - 1)) * input[tx];
-    }
-  }
-}
-
-__global__
-void switchMatrixFull(int N, int n, int c, int h, int w,
-		      float *old_data, float *new_data){
-
-  int i = blockIdx.x * blockDim.x + threadIdx.x;
-  if(i < N){
-    int col = ((i % (c * h * w)) % (h * w)) % w;
-    int row = ((i % (c * h * w)) % (h * w)) / w;
-    int ch = (i % (c * h * w)) / (h * w);
-    int n_new = i / (c * h * w);
-
-    new_data[((n_new * c + ch) * h + row ) * w + col] =
-      old_data[((ch * n + n_new) * h + row ) * w + col];
-  }
-
-}
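The permutation this kernel performs is easier to read as host loops: the GEMM in the non-perforated path of tensorConvApprox below leaves its result with the filter/channel index outermost ([C][N][H][W]), and switchMatrixFull moves it back to NCHW. A minimal sketch (src, dst, and the loop bounds are hypothetical host-side stand-ins for the GPU buffers and tensor dimensions):

// Host-side equivalent of switchMatrixFull: [C][N][H][W] -> [N][C][H][W].
for (int nn = 0; nn < N; ++nn)
  for (int cc = 0; cc < C; ++cc)
    for (int hh = 0; hh < H; ++hh)
      for (int ww = 0; ww < W; ++ww)
        dst[((nn * C + cc) * H + hh) * W + ww] =    // NCHW destination
            src[((cc * N + nn) * H + hh) * W + ww]; // CNHW source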
-
-void* tensorConvApprox(void* input_ptr, void* filter_ptr,
-		       int vertical_pad, int horizontal_pad, int vertical_stride,
-		       int horizontal_stride, int conv_mode, int conv_groups,
-		       int row, int col, int skip_every, int offset){
-
-  INFO("*** TensorConvolution approximation \n");
-  profileEvent("Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  //profileEvent("H2F_start");
-  convertToFP32(input);
-  convertToFP32(filter);
-  //profileEvent("H2F_end");
-
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  int h_eff = h - h / row;
-  if(h % row > row - 1 - offset)
-    h_eff = h_eff - 1;
-
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  int w_eff = w - w / col;
-  if(w % col > col - 1 - offset)
-    w_eff = w_eff - 1;
-
-
-  Tensor *new_output = (Tensor*)create4DTensor((cudnnDataType_t) float_type,
-				       CUDNN_TENSOR_NCHW, n, c, h, w);
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(new_output, DEVICE);
-
-  if(row > 1){
-    Tensor *output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, // input->data_type,
-				      CUDNN_TENSOR_NCHW, n, c, h_eff, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    float * convData;
-    int convDataSize = sizeof(float) * n * num_filter_elem * h_eff * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h_eff * w + blockSize - 1) / blockSize;
-
-    convToGemmPerfRow<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					       input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-					       input->dims.dim_sizes[3], KH, KW, vertical_pad,
-					       horizontal_pad, h, w,
-					       vertical_stride, horizontal_stride,
-					       row, offset, h_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    float alpha = 1.0f, beta = 0.0f;
-    checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					      CUBLAS_OP_N, CUBLAS_OP_N,
-					      h_eff * w, c, num_filter_elem,
-					      &alpha,
-					      convData, h_eff * w, num_filter_elem * h_eff * w,
-					      (float *)filter->gpu_data, num_filter_elem, 0,
-					      &beta,
-					      (float *)output->gpu_data, h_eff * w, c * h_eff * w,
-					      n));
-
-    // new_output was already allocated (and moved to DEVICE) above;
-    // re-creating it here would leak the first allocation, so reuse it directly.
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 127) / 128;
-    approxInterpolateRow<<<numBlocks,128>>>(n * c * h * w, h_eff, n, c, h, w,
-					    (float *) output->gpu_data, (float *) new_output->gpu_data,
-					    row, offset);
-    cudaDeviceSynchronize();
-
-    freeTensor(output);
-    cudaFree(convData);
-  }
-  else if(col > 1){
-    
-    Tensor *output = (Tensor*)create4DTensor((cudnnDataType_t) float_type, //input->data_type,
-				     CUDNN_TENSOR_NCHW, n, c, h, w_eff);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    float * convData;
-    int convDataSize = sizeof(float) * n * num_filter_elem * h * w_eff;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w_eff + blockSize - 1) / blockSize;
-
-    convToGemmPerfCol<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-					       input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-					       input->dims.dim_sizes[3], KH, KW,
-					       vertical_pad, horizontal_pad, h, w,
-					       vertical_stride, horizontal_stride,
-					       col, offset, w_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    float alpha = 1.0f, beta = 0.0f;
-    checkCudaErrors(cublasSgemmStridedBatched(cublasHandle,
-					      CUBLAS_OP_N, CUBLAS_OP_N,
-					      h * w_eff, c, num_filter_elem,
-					      &alpha,
-					      convData, h * w_eff, num_filter_elem * h * w_eff,
-					      (float *)filter->gpu_data, num_filter_elem, 0,
-					      &beta,
-					      (float *)output->gpu_data, h * w_eff, c * h * w_eff,
-					      n));
-
-    // new_output was already allocated (and moved to DEVICE) above;
-    // re-creating it here would leak the first allocation, so reuse it directly.
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 127) / 128;
-    approxInterpolateCol<<<numBlocks,128>>>(n * c * h * w, w_eff, n, c, h, w,
-					    (float *)output->gpu_data, (float *)new_output->gpu_data,
-					    col, offset);
-    cudaDeviceSynchronize();
-
-    freeTensor(output);
-    cudaFree(convData);
-  }
-  else{
-    Tensor *output = (Tensor*)create4DTensor((cudnnDataType_t) float_type,
-					     CUDNN_TENSOR_NCHW, n, c, h, w);
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-    //reduced number after skipping
-    int reduced_filter_elem;
-    if(offset != skip_every){
-      reduced_filter_elem = num_filter_elem - (num_filter_elem/skip_every);
-      if(num_filter_elem % skip_every > skip_every - 1 - offset)
-	reduced_filter_elem = reduced_filter_elem - 1;
-    }
-    else
-      reduced_filter_elem = num_filter_elem;
-
- 
-    float * convData;
-    int convDataSize = sizeof(float) * n * reduced_filter_elem * h * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-    float * reducedFilter;
-    checkCudaErrors(cudaMalloc(&reducedFilter, sizeof(float) * c * reduced_filter_elem));
-    const int filtBlockSize = 128;
-    const int filtGridSize = (c * num_filter_elem + filtBlockSize - 1) / filtBlockSize;
-    if(offset != skip_every)
-      createReducedFiltersFull<<<filtGridSize, filtBlockSize>>>(reducedFilter,
-								(float *)filter->gpu_data,
-								c, num_filter_elem, reduced_filter_elem,
-								skip_every, offset);
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    const int blockSize = 128;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-    if(skip_every == 2){
-      convToGemmFullInput2<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-						    input->dims.dim_sizes[1],
-						    input->dims.dim_sizes[2],
-						    input->dims.dim_sizes[3],
-						    KH, KW, vertical_pad, horizontal_pad,
-						    h, w, vertical_stride, horizontal_stride,
-						    reduced_filter_elem, skip_every,
-						    offset);
-    }
-    else{
-      convToGemmFullInput<<<gridSize, blockSize>>>(convData, (float *)input->gpu_data, n,
-						   input->dims.dim_sizes[1],
-						   input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3],
-						   KH, KW, vertical_pad, horizontal_pad,
-						   h, w, vertical_stride, horizontal_stride,
-						   reduced_filter_elem, skip_every,
-						   offset);
-    }
-
-    checkCudaErrors(cudaDeviceSynchronize());
-    // Do the matrix multiplication: convData times the filter matrix
-    // (reducedFilter when elements are skipped, otherwise filter->gpu_data)
-    const float alpha = 1.0;
-    const float beta = 0.0;
-
-    if(offset != skip_every)
-      checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				   n * h * w, c, reduced_filter_elem,
-				   &alpha,
-				   convData, CUDA_R_32F, n * h * w,
-				   reducedFilter, CUDA_R_32F, reduced_filter_elem,
-				   &beta,
-				   (float *) output->gpu_data, CUDA_R_32F, n * h * w,
-				   CUDA_R_32F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-    else
-      checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				   n * h * w, c, reduced_filter_elem,
-				   &alpha,
-				   convData, CUDA_R_32F, n * h * w,
-				   (float *) filter->gpu_data, CUDA_R_32F,
-				   reduced_filter_elem,
-				   &beta,
-				   (float *) output->gpu_data, CUDA_R_32F, n * h * w,
-				   CUDA_R_32F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    switchMatrixFull<<<numBlocks,256>>>(n * c * h * w, n, c, h, w,
-				    (float *)output->gpu_data,
-				    (float *)new_output->gpu_data);
-
-    checkCudaErrors(cudaDeviceSynchronize());
-    
-    cudaFree(convData);
-    cudaFree(reducedFilter);
-    freeTensor(output);
-  }
-
-  profileEvent("Conv_end");
-  
-  return new_output;
-  
-}
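Two worked examples of the h_eff/w_eff bookkeeping above may help. Reading the correction term together with the skip test in createReducedFiltersFull suggests the dropped output rows are those with index congruent to row - 1 - offset (mod row); that convention is an inference from the counting logic here, not a statement from the perforation kernels, which are defined elsewhere:

// Sketch of the perforated row count (the same logic applies to columns):
int h_eff = h - h / row;          // one dropped row per full group of `row` rows
if (h % row > row - 1 - offset)   // the leftover partial group also loses a row
  h_eff -= 1;
// e.g. h = 10, row = 3, offset = 0: rows 2, 5, 8 dropped    -> h_eff = 7
//      h = 10, row = 3, offset = 2: rows 0, 3, 6, 9 dropped -> h_eff = 6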
-
-void* tensorConvApproxHalf(void* input_ptr, void* filter_ptr,
-			   int vertical_pad, int horizontal_pad, int vertical_stride,
-			   int horizontal_stride, int conv_mode, int conv_groups,
-			   int row, int col, int skip_every, int offset){
-
-  INFO("*** TensorConvolution half approximation \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*)input_ptr;
-  Tensor* filter = (Tensor*)filter_ptr;
-  //FIXME: Current hack to preserve backward compatibility
-  if (conv_groups == 0) {
-    conv_groups = 1;
-  }
-
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  profileEvent("F2H_start");
-  convertToFP16(input);
-  convertToFP16(filter);
-  profileEvent("F2H_end");
-
-  int n, c, h, w; // output dimensions
-  n = input->dims.dim_sizes[0];
-  c = filter->dims.dim_sizes[0]; //number of filters
-  const int KH = filter->dims.dim_sizes[2];
-  const int KW = filter->dims.dim_sizes[3];
-
-  h = (2 * vertical_pad + input->dims.dim_sizes[2] - KH) / vertical_stride + 1;
-  int h_eff = h - h / row;
-  if(h % row > row - 1 - offset)
-    h_eff = h_eff - 1;
-
-  w = (2 * horizontal_pad + input->dims.dim_sizes[3] - KW) / horizontal_stride + 1;
-  int w_eff = w - w / col;
-  if(w % col > col - 1 - offset)
-    w_eff = w_eff - 1;
-
-
-  Tensor *new_output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-				       CUDNN_TENSOR_NCHW, n, c, h, w);
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(new_output, DEVICE);
-
-  if(row > 1){
-    Tensor *output_half = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-						  CUDNN_TENSOR_NCHW,
-						  n, c, h_eff, w);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output_half, DEVICE);
-
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * num_filter_elem * h_eff * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h_eff * w + blockSize - 1) / blockSize;
-
-    convToGemmPerfRowHalf<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3], KH, KW, vertical_pad,
-						   horizontal_pad, h, w, vertical_stride,
-						   horizontal_stride, row, offset, h_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h_eff * w, c, num_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h_eff * w,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F, num_filter_elem,
-				 beta_half,
-				 (__half*) output_half->gpu_half_data, CUDA_R_16F, n * h_eff * w,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    approxInterpolateRowHalf<<<numBlocks,256>>>(n * c * h * w, h_eff, n, c, h, w,
-						(__half *)output_half->gpu_half_data,
-						(__half *)new_output->gpu_half_data,
-						row, offset);
-    cudaDeviceSynchronize();
-
-    freeTensor(output_half);
-    cudaFree(convData);
-  }
-  else if(col > 1){
-    Tensor *output_half = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-					  CUDNN_TENSOR_NCHW, n, c, h, w_eff);
-
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output_half, DEVICE);
-    // NOTE: Necessary to insert the above call for every output tensor
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * num_filter_elem * h * w_eff;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w_eff + blockSize - 1) / blockSize;
-
-    convToGemmPerfColHalf<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1], input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3], KH, KW, vertical_pad,
-						   horizontal_pad, h, w, vertical_stride,
-						   horizontal_stride, col, offset, w_eff);
-
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-
-    
-    checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				 n * h * w_eff, c, num_filter_elem,
-				 alpha_half,
-				 convData, CUDA_R_16F, n * h * w_eff,
-				 (__half*) filter->gpu_half_data, CUDA_R_16F, num_filter_elem,
-				 beta_half,
-				 (__half*) output_half->gpu_half_data, CUDA_R_16F, n * h * w_eff,
-				 CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
-    //interpolate
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    approxInterpolateColHalf<<<numBlocks,256>>>(n * c * h * w, w_eff, n, c, h, w,
-						(__half *)output_half->gpu_half_data,
-						(__half *)new_output->gpu_half_data,
-						col, offset);
-    
-    cudaDeviceSynchronize();
-
-    freeTensor(output_half);
-    cudaFree(convData);
-
-  }
-  else{
-    Tensor *output = (Tensor*)create4DTensor((cudnnDataType_t) half_type,
-				   CUDNN_TENSOR_NCHW, n, c, h, w);
-    // NOTE: Changing output tensor placement from host to device
-    changeTensorPlacement(output, DEVICE);
-    
-    //total number of filter elem
-    const int num_filter_elem = KH * KW * input->dims.dim_sizes[1];
-    //reduced number after skipping
-    int reduced_filter_elem;
-    if(offset != skip_every){
-      reduced_filter_elem = num_filter_elem - (num_filter_elem/skip_every);
-      if(num_filter_elem % skip_every > skip_every - 1 - offset)
-	reduced_filter_elem = reduced_filter_elem - 1;
-    }
-    else
-      reduced_filter_elem = num_filter_elem;
-    
-    __half * convData;
-    int convDataSize = sizeof(__half) * n * reduced_filter_elem * h * w;
-    checkCudaErrors(cudaMalloc(&convData, convDataSize));
-    __half * reducedFilter;
-    checkCudaErrors(cudaMalloc(&reducedFilter, sizeof(__half) * c * reduced_filter_elem));
-    const int filtBlockSize = 128;
-    const int filtGridSize = (c * num_filter_elem + filtBlockSize - 1) / filtBlockSize;
-    if(offset != skip_every)
-      createReducedFiltersHalf<<<filtGridSize, filtBlockSize>>>(reducedFilter,
-								(__half *)filter->gpu_half_data,
-								c, num_filter_elem, reduced_filter_elem,
-								skip_every, offset);
-    checkCudaErrors(cudaDeviceSynchronize());
-    
-    const int blockSize = 256;
-    const int gridSize = (n * input->dims.dim_sizes[1] * h * w + blockSize - 1) / blockSize;
-    if(skip_every == 2){
-      convToGemmHalfInput2<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						    input->dims.dim_sizes[1],
-						    input->dims.dim_sizes[2],
-						    input->dims.dim_sizes[3],
-						    KH, KW, vertical_pad, horizontal_pad,
-						    h, w, vertical_stride, horizontal_stride,
-						    reduced_filter_elem, skip_every,
-						    offset);
-    }
-    else{
-      convToGemmHalfInput<<<gridSize, blockSize>>>(convData, (__half *)input->gpu_half_data, n,
-						   input->dims.dim_sizes[1],
-						   input->dims.dim_sizes[2],
-						   input->dims.dim_sizes[3],
-						   KH, KW, vertical_pad, horizontal_pad,
-						   h, w, vertical_stride, horizontal_stride,
-						   reduced_filter_elem, skip_every,
-						   offset);
-    }
-    
-    checkCudaErrors(cudaDeviceSynchronize());
-    // Do the matrix multiplication: convData times the filter matrix
-    // (reducedFilter when elements are skipped, otherwise filter->gpu_half_data)
-    const __half alf = approx_float_to_half(1.0);
-    const __half bet = approx_float_to_half(0.0);
-    const __half *alpha_half = &alf;
-    const __half *beta_half = &bet;
-    
-    if(offset != skip_every)
-      checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				   n * h * w, c, reduced_filter_elem,
-				   alpha_half,
-				   convData, CUDA_R_16F, n * h * w,
-				   reducedFilter, CUDA_R_16F, reduced_filter_elem,
-				   beta_half,
-				   (__half*) output->gpu_half_data, CUDA_R_16F, n * h * w,
-				   CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-    else
-      checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-				   n * h * w, c, reduced_filter_elem,
-				   alpha_half,
-				   convData, CUDA_R_16F, n * h * w,
-				   (__half*) filter->gpu_half_data, CUDA_R_16F,
-				   reduced_filter_elem,
-				   beta_half,
-				   (__half*) output->gpu_half_data, CUDA_R_16F, n * h * w,
-				   CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-    
-    
-    int numBlocks = (n * c * h * w  + 255) / 256;
-    switchMatrix<<<numBlocks,256>>>(n * c * h * w, n, c, h, w,
-				    (__half *)output->gpu_half_data,
-				    (__half *)new_output->gpu_half_data);
-    
-    checkCudaErrors(cudaDeviceSynchronize());
-    
-    cudaFree(convData);
-    cudaFree(reducedFilter);
-    freeTensor(output);
-      
-  }
-
-  profileEvent("H2F_start");
-  convertToFP32_offline(new_output);
-  profileEvent("H2F_end");
-
-  profileEvent("#Conv_end");
-  
-  return new_output;
-}
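Both the FP32 and FP16 sampling paths above size their GEMM with the same reduced_filter_elem computation. A worked example, assuming a 3x3 kernel over 3 input channels (num_filter_elem = 27):

// skip_every = 2, offset = 0: 27 - 27/2 = 14 kept (even offsets survive);
//   27 % 2 = 1 is not > 2 - 1 - 0 = 1, so no extra element is dropped.
// skip_every = 2, offset = 1: 14 at first, then 27 % 2 = 1 > 2 - 1 - 1 = 0,
//   so one more is dropped -> 13 kept (odd offsets survive).
// offset == skip_every: sentinel meaning "no skipping" -> all 27 kept.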
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_img_runtime_utils.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_img_runtime_utils.h
deleted file mode 100644
index dc5cddf8a2121a937dbe8cd4582fe1022fd99f48..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_img_runtime_utils.h
+++ /dev/null
@@ -1,238 +0,0 @@
-#ifndef APPROXHPVM_IMG_RUNTIME_UTILS
-#define APPROXHPVM_IMG_RUNTIME_UTILS
-
-#include "configuration.h"
-#include "hpvm-rt-controller.h"
-
-#include "img_tensor_runtime.h"
-
-
-// Utilities header for ApproxHPVM image runtime API (wrapper runtime API)
-
-void* handleTensorFftApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorFft(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorFft", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorFft", pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
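Every handler in this header repeats the same resume/pause/record sequence around a single tensor call. A hypothetical helper (a sketch only, not part of the runtime; RC and its methods are exactly the ones used verbatim above) makes the pattern explicit:

// Hypothetical wrapper capturing the per-op profiling protocol (sketch only).
template <typename F>
void* profiledOp(const char* opName, F&& runOp) {
  RC->resume_profiler();                 // open the time/energy window
  void* t_out = runOp();                 // the actual tensor operation
  RC->pause_profiler();                  // close the window
  std::pair<double, double> pinfo = RC->get_time_energy();
  RC->reset_profiler();                  // clear readings for the next op
  RC->addToCurrentIterationComputeTime(opName, pinfo.first);
  RC->addToCurrentIterationComputeEnergy(opName, pinfo.second);
  return t_out;
}
// e.g. return profiledOp("tensorFft", [&] { return tensorFft(input); });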
-
-void* handleTensorReduceApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorReduce(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorReduce", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorReduce", pinfo.second); // and here
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::REDUCTION_SAMPLING :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorReductionSamplingReduce(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorReductionSamplingReduce",
-                                              pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorReductionSamplingReduce",
-                                                pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorProjectiveTApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorProjectiveT(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorProjectiveT", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorProjectiveT", pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorMap1ApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorMap1(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorMap1", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorMap1", pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorMap2ApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorMap2(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorMap2", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorMap2", pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorMap3ApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorMap3(input); //TODO: correct name here
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorMap3", pinfo.first); // and here
-        RC->addToCurrentIterationComputeEnergy("tensorMap3", pinfo.second); // and here
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_runtime_utils.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_runtime_utils.h
deleted file mode 100644
index 5e282d130f2a261ba76ebefa2b92af38682a7def..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/approxhpvm_runtime_utils.h
+++ /dev/null
@@ -1,612 +0,0 @@
-
-
-
-#ifndef APPROXHPVM_RUNTIME_UTILS
-#define APPROXHPVM_RUNTIME_UTILS
-
-
-#include "configuration.h"
-#include "hpvm-rt-controller.h"
-
-
-// Utilities header for ApproxHPVM runtime API (wrapper runtime API)
-
-void* handleTensorAddApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input, void* bias) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorAdd(input, bias);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorAdd", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorAdd", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfAdd(input, bias);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfAdd", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfAdd", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-    }
-  } else if (approxTuples.size() == 2) {
-    ERROR("Currently unsupported case");
-    abort();
-  } else {
-    ERROR("Unsupported case");
-    abort();
-  }
-  return NULL;
-}
-
-void* handleTensorMulApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-  void* lhs, void* rhs) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorGemmGPU(lhs, rhs);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorGemmGPU", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorGemmGPU", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfGemmGPU(lhs, rhs);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfGemmGPU", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfGemmGPU", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-  } else if (approxTuples.size() == 2) {
-    ERROR("Currently unsupported case");
-    abort();
-  } else {
-    ERROR("Unsupported case");
-    abort();
-  }
-  return NULL;
-}
-
-void* handleTensorConvApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-  void* input, void* filter, 
-  int conv_pad_h, int conv_pad_w,
-  int conv_stride_h, int conv_stride_w) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorConvApprox(input, filter,
-                                 conv_pad_h, conv_pad_w,
-                                 conv_stride_h, conv_stride_w,
-                                 1, 1,
-                                 1, 1, 1, 1);
-//        t_out = tensorConvPerfCuda(input, filter,
-//                                   conv_pad_h, conv_pad_w,
-//                                   conv_stride_h, conv_stride_w,
-//                                   1, 1,
-//                                   1, 1, 0);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorConvApprox", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorConvApprox", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorConvApproxHalf(input, filter,
-                                     conv_pad_h, conv_pad_w,
-                                     conv_stride_h, conv_stride_w,
-                                     1, 1,
-                                     1, 1, 1, 1);
-//        t_out = tensorConvPerfCudaHalf(input, filter,
-//                                      conv_pad_h, conv_pad_w,
-//                                      conv_stride_h, conv_stride_w,
-//                                      1, 1,
-//                                      1, 1, 0);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorConvApproxHalf", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorConvApproxHalf", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::PERFORATION :
-        {
-          int row = 0, col = 0, offset = 0;
-          switch (param) {
-            case 21:
-              row = 1; col = 2; offset = 0;
-              break;
-            case 22:
-              row = 1; col = 2; offset = 1;
-              break;
-            case 23:
-              row = 1; col = 3; offset = 0;
-              break;
-            case 24:
-              row = 1; col = 3; offset = 1;
-              break;
-            case 25:
-              row = 1; col = 3; offset = 2;
-              break;
-            case 26:
-              row = 2; col = 1; offset = 0;
-              break;
-            case 27:
-              row = 2; col = 1; offset = 1;
-              break;
-            case 28:
-              row = 3; col = 1; offset = 0;
-              break;
-            case 29:
-              row = 3; col = 1; offset = 1;
-              break;
-            case 30:
-              row = 3; col = 1; offset = 2;
-              break;
-            default:
-              DEBUG("Unsupported Option: Select default, 1-2-0.\n");
-              row = 1; col = 2; offset = 0;
-              break;
-          }
-          void* t_out;
-          RC->resume_profiler();
-          t_out = tensorConvApproxHalf(input, filter,
-                                       conv_pad_h, conv_pad_w,
-                                       conv_stride_h, conv_stride_w,
-                                       1, 1,
-                                       row, col, 1, offset);
-//          t_out = tensorConvPerfCudaHalf(input, filter,
-//                                         conv_pad_h, conv_pad_w,
-//                                         conv_stride_h, conv_stride_w,
-//                                         1, 1,
-//                                         row, col, offset);
-          RC->pause_profiler();
-          std::pair<double, double> pinfo = RC->get_time_energy();
-          RC->reset_profiler();
-          RC->addToCurrentIterationComputeTime("tensorConvApproxHalf(_perf)", pinfo.first);
-          RC->addToCurrentIterationComputeEnergy("tensorConvApproxHalf(_perf)", pinfo.second);
-          return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::INPUT_SAMPLING :
-        {
-          int skip_rate = 2, offset = 0;
-          switch (param) {
-            case 31:
-              skip_rate = 2; offset = 0;
-              break;
-            case 32:
-              skip_rate = 2; offset = 1;
-              break;
-            case 33:
-              skip_rate = 4; offset = 0;
-              break;
-            case 34:
-              skip_rate = 4; offset = 1;
-              break;
-            case 35:
-              skip_rate = 4; offset = 2;
-              break;
-            case 36:
-              skip_rate = 4; offset = 3;
-              break;
-            default:
-              DEBUG("Unsupported Option: Select default, 2-0.\n");
-              skip_rate = 2; offset = 0;
-              break;
-          }
-          void* t_out;
-          RC->resume_profiler();
-          t_out = tensorConvApproxHalf(input, filter,
-                                       conv_pad_h, conv_pad_w,
-                                       conv_stride_h, conv_stride_w,
-                                       1, 1,
-                                       1, 1, skip_rate, skip_rate - 1/*offset*/); //FIXME
-          RC->pause_profiler();
-          std::pair<double, double> pinfo = RC->get_time_energy();
-          RC->reset_profiler();
-          RC->addToCurrentIterationComputeTime("tensorConvApproxHalf(_samp)", pinfo.first);
-          RC->addToCurrentIterationComputeEnergy("tensorConvApproxHalf(_samp)", pinfo.second);
-          // Overwrite the result written in t_out with the simulation result,
-          // to propagate that to the next layer.
-          // TODO: Remove this call when bug is fixed in 
-//          t_out = tensorConvSampSim(input, filter,
-//                                    conv_pad_h, conv_pad_w,
-//                                    conv_stride_h, conv_stride_w,
-//                                    1, 1, skip_rate, offset);
-          return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-    }
-  } else if (approxTuples.size() == 2) {
-    ERROR("Currently unsupported case");
-    abort();
-  } else {
-    ERROR("Unsupported case");
-    abort();
-  }
-  return NULL;
-}
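The two switch statements above decode the tuning parameter into knob values; collected in one place, a hypothetical table-driven sketch (needs <map>; the runtime itself uses the switches):

struct PerfKnob { int row, col, offset; };                 // PERFORATION params
static const std::map<int, PerfKnob> kPerfKnobs = {
    {21, {1, 2, 0}}, {22, {1, 2, 1}}, {23, {1, 3, 0}}, {24, {1, 3, 1}},
    {25, {1, 3, 2}}, {26, {2, 1, 0}}, {27, {2, 1, 1}}, {28, {3, 1, 0}},
    {29, {3, 1, 1}}, {30, {3, 1, 2}},                      // default: {1, 2, 0}
};
static const std::map<int, std::pair<int, int>> kSampKnobs = { // param -> (skip_rate, offset)
    {31, {2, 0}}, {32, {2, 1}}, {33, {4, 0}}, {34, {4, 1}},
    {35, {4, 2}}, {36, {4, 3}},                            // default: {2, 0}
};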
-
-void* handleTensorGroupConvApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-  void* input, void* filter,
-  int vertical_pad, int horizontal_pad,
-  int vertical_stride, int horizontal_stride,
-  int conv_mode, int conv_groups) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorConvCutlass(input, filter,
-                                  vertical_pad, horizontal_pad,
-                                  vertical_stride, horizontal_stride,
-                                  conv_mode, conv_groups);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorConvCutlass", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorConvCutlass", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfConvCutlass(input, filter,
-                                      vertical_pad, horizontal_pad,
-                                      vertical_stride, horizontal_stride,
-                                      conv_mode, conv_groups);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfConvCutlass", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfConvCutlass", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorBatchNormApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-  void* input_ptr, void* gamma_ptr, void* beta_ptr,
-  void* mean_ptr, void* variance_ptr, double epsilon) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorBatchNorm(input_ptr, gamma_ptr, beta_ptr,
-                               mean_ptr, variance_ptr, epsilon);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorBatchNorm", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorBatchNorm", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfBatchNorm(input_ptr, gamma_ptr, beta_ptr,
-                                   mean_ptr, variance_ptr, epsilon);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfBatchNorm", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfBatchNorm", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-    // TODO additional approx methods implemented here
-    }
-  } else if (approxTuples.size() == 2) {
-    ERROR("Currently unsupported case");
-    abort();
-  } else {
-    ERROR("Unsupported case");
-    abort();
-  }
-  return NULL;
-}
-
-void* handleTensorReluApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorRelu(input);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorRelu", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorRelu", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfRelu(input);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfRelu", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfRelu", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorClippedReluApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input, float min, float max) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorRelu2(input, min, max);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorRelu2", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorRelu2", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfRelu2(input, min, max);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfRelu2", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfRelu2", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorTanhApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorTanh(input);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorTanh", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorTanh", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfTanh(input);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfTanh", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfTanh", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorPoolingApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-  void* input_ptr, int poolFunction,
-  int window_height, int window_width,
-  int vertical_pad, int horizontal_pad,
-  int vertical_stride, int horizontal_stride) {
-
-  if (approxTuples.size() == 1) {
-    enum GPUNodeConfiguration::APPROX approx = approxTuples[0].first;
-    int param = approxTuples[0].second;
-    switch (approx) {
-      case GPUNodeConfiguration::APPROX::FP32 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorPooling(input_ptr,
-                             poolFunction,
-                             window_height, window_width,
-                             vertical_pad, horizontal_pad,
-                             vertical_stride, horizontal_stride);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorPooling", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorPooling", pinfo.second);
-        return t_out;
-        }
-      case GPUNodeConfiguration::APPROX::FP16 :
-        {
-        void* t_out;
-        RC->resume_profiler();
-        t_out = tensorHalfPooling(input_ptr,
-                                 poolFunction,
-                                 window_height, window_width,
-                                 vertical_pad, horizontal_pad,
-                                 vertical_stride, horizontal_stride);
-        RC->pause_profiler();
-        std::pair<double, double> pinfo = RC->get_time_energy();
-        RC->reset_profiler();
-        RC->addToCurrentIterationComputeTime("tensorHalfPooling", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("tensorHalfPooling", pinfo.second);
-        return t_out;
-        }
-      default :
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      // TODO additional approx methods implemented here
-      }
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  return NULL;
-}
-
-void* handleTensorSoftmaxApproximationTuples(
-  std::vector< std::pair<GPUNodeConfiguration::APPROX, int> > &approxTuples,
-   void* input_ptr) {
-  //TODO: if approximation choices are added for softmax operation,
-  // implement this like the other handle* functions
-  void* t_out;
-  RC->resume_profiler();
-  t_out = tensorSoftmax(input_ptr);
-  RC->pause_profiler();
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationComputeTime("tensorSoftmax", pinfo.first);
-  RC->addToCurrentIterationComputeEnergy("tensorSoftmax", pinfo.second);
-  return t_out;
-}
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/configuration.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/configuration.h
deleted file mode 100644
index 99c465434a2879d85624b7ff6bb4141dd8fe4634..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/configuration.h
+++ /dev/null
@@ -1,360 +0,0 @@
-#ifndef LLVM_HPVM_CONFIGURATION_H
-#define LLVM_HPVM_CONFIGURATION_H
-
-#include <map>
-#include <vector>
-
-#include "debug.h"
-
-// Configuration related class definitions - in Configuration.h
-
-// Describes the internal choices made for an ApproxHPVM node
-class NodeConfiguration {
-public:
-  enum NODE_CONFIGURATION_TARGET
-  {
-    PROMISE,
-    GPU,
-    END
-  };
-
-protected:
-  enum NODE_CONFIGURATION_TARGET NODE_CONFIGURATION_TARGET_ID;
-
-public:
-  bool isPROMISENodeConfiguration() {
-    return NODE_CONFIGURATION_TARGET_ID == PROMISE;
-  }
-
-  bool isGPUNodeConfiguration() {
-    return NODE_CONFIGURATION_TARGET_ID == GPU;
-  }
-
-  virtual ~NodeConfiguration() {} // virtual: instances are held and may be deleted via base pointers
-  virtual void print() = 0;
-};
-
-class PROMISENodeConfiguration : public NodeConfiguration {
-public:
-  // Approximation methods available for this HW type
-  enum APPROX
-  {
-    SWING_LEVEL,
-    END
-  };
-
-private:
-  // A vector, containing pairs of approximation method and tunable parameter
-  // (expressed as int, or ignored when not applicable)
-  std::vector< std::pair<enum APPROX, int> > ApproxChoices;
-
-public:
-  void pushNewApproximationChoice(enum APPROX approx, int u) {
-    ApproxChoices.push_back(std::make_pair(approx, u));
-  }
-
-  std::vector<std::pair<enum APPROX, int> > &getApproxChoices() {
-    return ApproxChoices;
-  }
-
-  PROMISENodeConfiguration() {
-    NODE_CONFIGURATION_TARGET_ID = PROMISE;
-  }
-  ~PROMISENodeConfiguration() {}
-
-  void print() override;
-};
-
-class GPUNodeConfiguration : public NodeConfiguration {
-public:
-  // Approximation methods available for this HW type
-  enum APPROX
-  {
-    FP32,
-    FP16,
-    PERFORATION,
-    INPUT_SAMPLING,
-    REDUCTION_SAMPLING,
-//  ADDITIONAL_APPROXIMATION_METHOD
-    APPROX_END
-  };
-
-  // Operations to be approximated in the node using this configuration
-  enum TENSOR_OP
-  {
-    ADD,
-    BATCHNORM,
-    CONV,
-    GROUP_CONV,
-    MUL,
-    RELU,
-    CLIPPED_RELU,
-    TANH,
-    POOL_MAX,
-    POOL_MEAN,
-    POOL_MIN,
-    SOFTMAX,
-    FFT,
-    REDUCE,
-    PROJECTIVE_T,
-    MAP1,
-    MAP2,
-    MAP3,
-//    STENCIL,
-//    COSINE_T,
-//  ADDITIONAL_TENSOR_OPERATION
-    TENSOR_OP_END
-  };
-
-private:
-  // A vector, containing pairs of approximation method and tunable parameter
-  // (expressed as int, or ignored when not applicable) for each operation
-  std::vector< std::pair< enum TENSOR_OP, std::vector< std::pair<enum APPROX, int> > > > ApproxChoices;
-
-public:
-  void pushNewTensorOperation(enum TENSOR_OP top) {
-    std::vector< std::pair<enum APPROX, int> > emptyVec;
-    ApproxChoices.push_back(std::make_pair(top, emptyVec));
-  }
-
-  void pushNewApproximationChoiceForOperation(enum APPROX approx, int u) {
-    unsigned size = ApproxChoices.size();
-    CUSTOM_ASSERT(size >= 1 && "Cannot apply approximation choice to non-existent operation.");
-    ApproxChoices[size-1].second.push_back(std::make_pair(approx, u));
-  }
-
-  std::vector< std::pair< enum TENSOR_OP, std::vector< std::pair<enum APPROX, int> > > > &getApproxChoices() {
-    return ApproxChoices;
-  }
-
-  GPUNodeConfiguration() {
-    NODE_CONFIGURATION_TARGET_ID = GPU;
-  }
-  ~GPUNodeConfiguration() {}
-
-  void print() override;
-};
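A short usage sketch of this class, built only from the methods above (the knob values follow the wrapper-runtime conventions; e.g. perforation parameter 21 decodes to row 1, col 2, offset 0 there):

// Sketch: a node running a perforated convolution followed by an FP16 relu.
GPUNodeConfiguration cfg;
cfg.pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::CONV);
cfg.pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::PERFORATION, 21);
cfg.pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::RELU);
cfg.pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::FP16, 0);
cfg.print();  // -> " gpu conv perf 21 relu fp16 0"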
-
-// Configuration : Includes configuration information :
-// - name
-// - speedup
-// - energy
-// - accuracy (compared to golden output)
-// - accuracy loss (compared to baseline)
-// - a hardware choice and a set of operation-approximation choices, described in setup
-struct Configuration {
-  std::string name;
-  float speedup;
-  float energy;
-  float accuracy;
-  float accuracyLoss;
-  std::map<std::string, NodeConfiguration * > setup;
-
-  Configuration(std::string &n, float f, float e, float a, float al) :
-    name(n), speedup(f), energy(e), accuracy(a), accuracyLoss(al) {}
-
-  float getSpeedup() {
-    return speedup;
-  }
-
-  float getEnergy() {
-    return energy;
-  }
-
-  float getAccuracy() {
-    return accuracy;
-  }
-
-  float getAccuracyLoss() {
-    return accuracyLoss;
-  }
-
-  void print();
-};
-
-// Comparison operator definition, in increasing accuracy loss
-// (for std sort, used in pareto optimal computation)
-struct ConfigurationLessThan {
-  bool operator()(const struct Configuration &a, const struct Configuration &b) const {
-    return (a.accuracyLoss < b.accuracyLoss);
-  }
-};
-
-// Comparison operator definition, in increasing accuracy loss
-// (for std lower bound, used in pareto optimal frontier search)
-struct ConfigurationLessThan_AL {
-  bool operator()(const struct Configuration *a, const float &b) const {
-    return (a->accuracyLoss < b);
-  }
-};
-
-// Comparison operator definition, in increasing speedup
-// (for std lower bound, used in pareto optimal frontier search)
-struct ConfigurationLessThan_SP {
-  bool operator()(const struct Configuration *a, const float &b) const {
-    return (a->speedup < b);
-  }
-};
-
-// Comparison operator definition, in increasing energy
-// (for std lower bound, used in pareto optimal frontier search)
-struct ConfigurationLessThan_E {
-  bool operator()(const struct Configuration *a, const float &b) const {
-    return (a->energy < b);
-  }
-};
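A sketch of how these comparators are used (the vectors and the accuracy-loss budget are hypothetical; the real Pareto-frontier construction lives in the runtime controller):

#include <algorithm>  // std::sort, std::lower_bound
std::vector<Configuration> all;          // parsed configurations
std::sort(all.begin(), all.end(), ConfigurationLessThan());  // by accuracy loss
std::vector<Configuration*> frontier;    // Pareto-optimal subset, same ordering
float lossBudget = 2.5f;                 // hypothetical accuracy-loss budget
auto it = std::lower_bound(frontier.begin(), frontier.end(),
                           lossBudget, ConfigurationLessThan_AL());
// configurations in [frontier.begin(), it) have accuracyLoss < lossBudget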
-
-enum SEARCH_KIND
-  {
-    SPEEDUP,
-    ENERGY,
-    ACCURACY_LOSS,
-    END
-  };
-
-//****** HEADER Ends - Source Starts
-
-
-// Helper configuration print methods
-
-void PROMISENodeConfiguration::print() {
-
-  printf(" promise");
-  for (auto &it : ApproxChoices) {
-    printf(" ");
-    switch (it.first) {
-      case APPROX::SWING_LEVEL :
-        printf("swing_level");
-        break;
-      default:
-        ERROR("Unknown approximation option");
-        break;
-      // TODO additional approx methods to be printed here
-    }
-    printf(" %d", it.second);
-  }
-
-  printf("\n");
-
-}
-
-void GPUNodeConfiguration::print() {
-
-  printf(" gpu");
-  for (auto &it : ApproxChoices) {
-
-    printf(" ");
-    switch (it.first) {
-      case TENSOR_OP::ADD :
-        printf("add");
-        break;
-      case TENSOR_OP::BATCHNORM :
-        printf("batchnorm");
-        break;
-      case TENSOR_OP::CONV :
-        printf("conv");
-        break;
-      case TENSOR_OP::GROUP_CONV :
-        printf("group_conv");
-        break;
-      case TENSOR_OP::MUL :
-        printf("mul");
-        break;
-      case TENSOR_OP::RELU :
-        printf("relu");
-        break;
-      case TENSOR_OP::CLIPPED_RELU :
-        printf("clipped_relu");
-        break;
-      case TENSOR_OP::TANH :
-        printf("tanh");
-        break;
-      case TENSOR_OP::POOL_MAX :
-        printf("pool_max");
-        break;
-      case TENSOR_OP::POOL_MEAN :
-        printf("pool_mean");
-        break;
-      case TENSOR_OP::POOL_MIN :
-        printf("pool_min");
-        break;
-      case TENSOR_OP::SOFTMAX :
-        printf("softmax");
-        break;
-      case TENSOR_OP::FFT :
-        printf("fft");
-        break;
-      case TENSOR_OP::REDUCE :
-        printf("reduce");
-        break;
-      case TENSOR_OP::PROJECTIVE_T :
-        printf("projectiveT");
-        break;
-      case TENSOR_OP::MAP1 :
-        printf("map1");
-        break;
-      case TENSOR_OP::MAP2 :
-        printf("map2");
-        break;
-      case TENSOR_OP::MAP3 :
-        printf("map3");
-        break;
-      default :
-        ERROR("Unknown tensor operation.");
-        break;
-      // TODO additional operations to be printed here
-    }
-
-    auto &approxVec = it.second;
-    for (auto &inner_it : approxVec) {
-      printf(" ");
-      switch (inner_it.first) {
-        case APPROX::FP32 :
-          printf("fp32");
-          break;
-        case APPROX::FP16 :
-          printf("fp16");
-          break;
-        case APPROX::PERFORATION :
-          printf("perf");
-          break;
-        case APPROX::INPUT_SAMPLING :
-          printf("samp");
-          break;
-        case APPROX::REDUCTION_SAMPLING :
-          printf("red_samp");
-          break;
-        default:
-          ERROR("Unknown approximation option");
-          break;
-        // TODO additional approx methods to be printed here
-      }
-      
-      printf(" %d", inner_it.second);
-    }
-
-  }
-  
-  printf("\n");
-
-}
-
-void Configuration::print() {
-
-  printf("+++++\n");
-  printf("%s %f %f %f %f\n", name.c_str(), speedup, energy, accuracy, accuracyLoss);
-  for (std::map<std::string, NodeConfiguration*>::const_iterator it = setup.begin();
-       it != setup.end(); ++it) {
-    printf("%s :", it->first.c_str());
-
-    it->second->print();
-  }
-
-  printf("-----\n");
-}
-
-
-
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/debug.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/debug.h
deleted file mode 100644
index 33864fed94f3a86d065f2f166adbfc36127cc42d..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/debug.h
+++ /dev/null
@@ -1,123 +0,0 @@
-
-
-#ifndef RUNTIME_DEBUG
-#define RUNTIME_DEBUG
-
-#define LOG_DEBUG 1   // Sets the debug logging to true
-#define LOG_INFO 1  // Sets the info logging to true
-#define ASSERT_FLAG // Sets assertions to true (opposite of NDEBUG macro)
-
-#include "tensor.h"
-
-
-#define FatalError(s) do {                                             \
-    std::stringstream _where, _message;                                \
-    _where << __FILE__ << ':' << __LINE__;                             \
-    _message << std::string(s) + "\n" << __FILE__ << ':' << __LINE__;  \
-    std::cerr << _message.str() << "\nAborting...\n";                  \
-    cudaDeviceReset();                                                 \
-    exit(1);                                                           \
-} while(0)
-
-
-#define checkCUDNN(status) do {                                        \
-    std::stringstream _error;                                          \
-    if (status != CUDNN_STATUS_SUCCESS) {                              \
-      _error << "CUDNN failure: " << cudnnGetErrorString(status);      \
-      FatalError(_error.str());                                        \
-    }                                                                  \
-} while(0)
-
-
-#define checkCudaErrors(status) do {                                   \
-    std::stringstream _error;                                          \
-    if (status != 0) {                                                 \
-      _error << "Cuda failure: " << status;                            \
-      FatalError(_error.str());                                        \
-    }                                                                  \
-} while(0)
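-
-// Typical usage of the checking macros (illustrative; 'bytes' and 'desc' are
-// hypothetical names):
-//
-//   float* gpu_buf;
-//   checkCudaErrors(cudaMalloc((void**) &gpu_buf, bytes));
-//   checkCUDNN(cudnnCreateTensorDescriptor(&desc));
-//
-// Any non-success status is routed through FatalError, which prints the
-// file:line, resets the device, and exits.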
-
-
-
-void INFO(const char* format, ...){
-  if(!LOG_INFO) // Don't print if info logging is disabled
-    return;
-  va_list args;
-  va_start(args, format);
-  printf("INFO: ");
-  vprintf(format, args);
-  va_end(args);
-}
-
-void DEBUG(const char* format, ...){
-  if(!LOG_DEBUG) // Don't print if debug logging is disabled
-    return;
-  va_list args;
-  va_start(args, format);
-  printf("DEBUG: ");
-  vprintf(format, args);
-  va_end(args);
-}
-
-void ERROR(const char* format, ...){
-  // Errors are always printed and always abort - callers (e.g. readQuantRanges)
-  // rely on ERROR not returning
-  va_list args;
-  va_start(args, format);
-  printf("ERROR!: ");
-  vprintf(format, args);
-  va_end(args);
-
-  abort();
-}
-
-#ifdef ASSERT_FLAG
-#define CUSTOM_ASSERT(x) do {                                 \
-  if (!(x)) {                                                 \
-    std::stringstream _message;                               \
-    _message << "Assertion failed at "                        \
-             << __FILE__ << ':' << __LINE__                   \
-             << " inside function " << __FUNCTION__ << "\n"   \
-             << "Condition: " << #x << "\n";                  \
-    std::cerr << _message.str();                              \
-    abort();                                                  \
-  }                                                           \
-} while (0)
-#else
-#define CUSTOM_ASSERT(x) do { } while (0)
-#endif
-
-void fillOnes(struct Tensor* tensor){
-  // initialization is specific to the floating point type
-  if(tensor->data_type == CUDNN_DATA_FLOAT){
-    float* data_arr = (float*) tensor->host_data;
-    for(unsigned int i = 0; i < tensor->num_elems; i++){
-      data_arr[i] = 1.0;    
-    }
-  }
-}
-
-
-void printTensorDescInfo(struct Tensor* tensor){
-
-  cudnnDataType_t dType;
-  int nStride, cStride, hStride, wStride;
-  int size1, size2, size3, size4;
-  cudnnGetTensor4dDescriptor(tensor->tensor_desc,
-  			     &dType,
-  			     &size1, &size2, &size3, &size4,
-  			     &nStride, &cStride, &hStride, &wStride);
-
-  DEBUG("dType = %d, size1 = %d, size2 = %d, size3 = %d, size4 = %d \n",
-  	 dType, size1, size2, size3, size4);
-  
-  DEBUG("nStride = %d, cStride = %d, hStride = %d, wStride = %d \n",
-  	 nStride, cStride, hStride, wStride);
-  
-}
-
-
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/error.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/error.h
deleted file mode 100644
index e2e78f1d10c048d73755df73d553b3932ab72d24..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/error.h
+++ /dev/null
@@ -1,1032 +0,0 @@
-
-#ifndef ERROR_HEADER
-#define ERROR_HEADER
-
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-#include <cmath>
-#include <ctime>
-#include <cfloat>
-#include <algorithm>
-#include <sstream>
-#include <vector>
-#include <iostream>
-#include <random>
-#include <string>
-#include <time.h>
-
-#include <curand.h>
-#include <curand_kernel.h>
-#include <math.h>
-#include <assert.h>
-
-
-#include "../include/debug.h"
-#include "tensor.h"
-#include "profiling.h"
-#include "tensor_utils.cu"
-#include "global_data.h"
-
-
-void readSkipTensors(int* skip_tensor_ids, int op_count){
-
-  for(int i = 0; i < op_count; i++){
-    int tensor_id = skip_tensor_ids[i];
-    skip_tensors[tensor_id] = 1;
-  }
-
-}
-
-
-
-
-void readOpenTunerFlags(const char* file_name){
-
-  total_ops = 0;
-  op_counter = 0;
-  op_accuracies.clear();
-  
-  FILE* fp = fopen(file_name, "r");
-  if(fp == NULL){
-    INFO("\nWARNING: File 'opentuner_flags' not found \n\n\n");
-    return;
-  }
-    
-  // fp is non-NULL here (checked above); only record successfully parsed values
-  int op_acc;
-  while(fscanf(fp, "%d", &op_acc) == 1){
-    op_accuracies.push_back(op_acc);
-    total_ops++;
-  }
-
-  fclose(fp);
-}
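-
-// The expected 'opentuner_flags' format is one integer knob value per tensor
-// operation, whitespace- or newline-separated, in (linear) CFG order, e.g.:
-//
-//   0
-//   2
-//   5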
-
-
-void readQuantRanges(char* file_name){
-
-  total_ops = 0;
-  op_counter = 0;
-  quant_ranges.clear();
-  
-  FILE* fp = fopen(file_name, "r");
-  if(fp == NULL){
-    ERROR("File %s not found \n", file_name);
-  }
-    
-  int retVal = 200;
-  while(retVal != EOF && retVal != -1){
-
-    int min;
-    int max;
-    if(fp != NULL){
-      retVal = fscanf(fp, "%d", &min);
-      printf("min = %d \n", min);
-
-      retVal = fscanf(fp, "%d", &max);
-      printf("max = %d \n", max);
-    }
-
-    if(retVal != -1){
-      struct Range* range = (struct Range*) malloc(sizeof(struct Range));
-      range->min = min;
-      range->max = max;
-      quant_ranges.push_back(range);
-      total_ops++;
-    }
-  }
-  
-  fclose(fp);
-}
-
-
-
-
-
-/*__device__ inline void atomicAdd(float* address, float value)
-
-{
-
-  float old = value;
-  float new_old;
-
-  do{
-    new_old = atomicExch(address, 0.0f);
-    new_old += old;
-  }
-
-  while ((old = atomicExch(address, new_old))!=0.0f);
-
-};
-*/
-
-
-
-
-
-Norm_t* calculateNorms(Tensor* x, Tensor* x_orig){
-
-  deviceToHostCopy(x);
-  deviceToHostCopy(x_orig);
-
-  // NOTE: Move floats to doubles - overflow is quite possible
-  float l1_norm = 0.0;
-  float l2_norm = 0.0;
-  float inf_norm = -1.0;
-  double total = 0.0;
-
-  float* arr1 = (float*) x->host_data;
-  float* arr2 = (float*) x_orig->host_data;
-  
-  for(unsigned int i = 0; i < x->num_elems; i++){
-
-    total = total + arr2[i];
-    
-    float diff = fabs(arr1[i] - arr2[i]);
-    l1_norm += diff;
-    l2_norm += (arr1[i] - arr2[i]) *  (arr1[i] - arr2[i]);
-
-    if(inf_norm < diff)
-      inf_norm = diff;
-  }
-
-  l1_norm = l1_norm / (x->num_elems * 1.0);
-  l2_norm = l2_norm / (x->num_elems * 1.0);
-
-  double distribution_mean = total / (x->num_elems * 1.0);
-  l1_norm = l1_norm / distribution_mean;
-  l2_norm = l2_norm / distribution_mean;
-
-    
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  norms->l1_norm = l1_norm;
-  norms->l2_norm = l2_norm;
-  norms->inf_norm = inf_norm;  
-  
-  INFO("l1_norm = %f \n", l1_norm);
-  INFO("l2_norm = %f \n", l2_norm);
-  INFO("inf_norm = %f \n", inf_norm);
-
-  return norms;
-}
-
-
-
-Norm_t* calculateNorms2(Tensor* x, Tensor* x_orig){
-
-  deviceToHostCopy(x);
-  deviceToHostCopy(x_orig);
-
-  // NOTE: Move all floats to doubles - overflow is quite possible
-  double l0_norm_A = 0.0;
-  double l0_norm_B = 0.0;
-
-  double l1_norm_A = 0.0;
-  double l1_norm_B = 0.0;
-  
-  double l2_norm_A = 0.0;
-  double l2_norm_B = 0.0;
-  float inf_norm = -1.0;
-  float orig_inf_norm = -1.0;
-  double total_diff = 0.0;
-  double total_diff_squared = 0.0;
- 
-  float* arr1 = (float*) x->host_data;
-  float* arr2 = (float*) x_orig->host_data;
-  
-  for(unsigned int i = 0; i < x->num_elems; i++){
-
-    if(arr2[i] != 0.0)
-      l0_norm_A = l0_norm_A + 1.0;
-    if(arr1[i] != 0.0)
-      l0_norm_B = l0_norm_B + 1.0;
-        
-    l1_norm_A = l1_norm_A + fabs(arr2[i]);
-    l1_norm_B = l1_norm_B + fabs(arr1[i]);
-
-    l2_norm_A = l2_norm_A + (arr2[i] * arr2[i]);
-    l2_norm_B = l2_norm_B + (arr1[i] * arr1[i]);
-      
-    float diff = fabs(arr1[i] - arr2[i]);
-    total_diff = total_diff + diff;
-    float diff_squared = diff * diff;
-    total_diff_squared = total_diff_squared + diff_squared; 
-
-
-    if(orig_inf_norm < diff){
-      orig_inf_norm = diff;
-    }
-    
-    // Relative difference value
-    float normalized_diff = diff / arr2[i];   
-    if(inf_norm < normalized_diff){
-      inf_norm = normalized_diff;
-    }    
-  }
-
-  // Relative L1 and Mean L1 norms of the difference Matrix
-  float mean_l1 = ( total_diff ) / x->num_elems;
-  float relative_l1 = ( total_diff ) / l1_norm_A;
-
-  // Computing Relative L2 norm - i.e., Euclidean distance
-  double norm_root_A = sqrt(l2_norm_A);
-  double diff_root = sqrt(total_diff_squared);
-  float mean_l2 = diff_root / x->num_elems;
-  float relative_l2 = diff_root / norm_root_A;
-
-  // Packing computed norms in Norm_t struct
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  // Mean metrics - not normalized for the distribution - suitable for precision tuning hardware
-  norms->mean_l1 = mean_l1;
-  norms->mean_l2 = mean_l2;
-  norms->orig_inf_norm = orig_inf_norm;
-
-  // Relative metrics (relative to distribution) - suitable for PROMISE
-  norms->l1_norm = relative_l1;
-  norms->l2_norm = relative_l2;
-  norms->inf_norm = inf_norm;  
-  
-  INFO("l1_norm = %f \n", relative_l1);
-  INFO("l2_norm = %f \n", relative_l2);
-  INFO("inf_norm = %f \n", inf_norm);
-
-  return norms;
-}
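-
-// In symbols, with A = original values (arr2) and B = perturbed values (arr1):
-//   relative_l1 = sum_i |B_i - A_i| / sum_i |A_i|
-//   relative_l2 = sqrt( sum_i (B_i - A_i)^2 ) / sqrt( sum_i A_i^2 )
-//   mean_l1     = sum_i |B_i - A_i| / num_elems
-//   mean_l2     = sqrt( sum_i (B_i - A_i)^2 ) / num_elems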
-
-
-
-
-
-__global__ void normComputeKernel(float* A, float * B, double* l1_A, double* l2_A,
-				  double* l1_diff, double* l2_diff, unsigned int n){
-
-  int i = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(i < n){
-    
-    double diff = fabsf(A[i] - B[i]);
-    double diff_squared = diff * diff;   
-
-    atomicAdd( l1_A,  fabsf(A[i]) );
-    atomicAdd( l2_A, (A[i] * A[i]) );
-
-    atomicAdd( l1_diff, diff);
-    atomicAdd( l2_diff, diff_squared);
-  }
-}
-
-
-
-__inline__ __device__ double warpReduceSum(double val) {
-
-  for (int offset = warpSize/2; offset > 0; offset /= 2)
-    val += __shfl_down(val, offset);
-
-  return val;
-}
-
-__inline__ __device__ double blockReduceSum(double val) {
-
-  static __shared__ double shared[32]; // Shared mem for 32 partial sums
-  int lane = threadIdx.x % warpSize;
-  int wid = threadIdx.x / warpSize;
-
-  val = warpReduceSum(val);     // Each warp performs partial reduction
-
-  if (lane == 0)
-    shared[wid]=val; // Write reduced value to shared memory
-
-  
-  __syncthreads();              // Wait for all partial reductions
-
-  
-  //read from shared memory only if that warp existed
-  val = (threadIdx.x < blockDim.x / warpSize) ? shared[lane] : 0;
-
-  if (wid == 0) val = warpReduceSum(val); //Final reduce within first warp
-
-  return val;
-
-}
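-
-// Structure of the two-level reduction above: each warp reduces its 32 values
-// with __shfl_down, lane 0 of every warp writes its partial sum to shared
-// memory, and the first warp then reduces those (at most 32) partials. This
-// supports block sizes up to warpSize * warpSize = 1024 threads and assumes
-// blockDim.x is a multiple of warpSize.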
-
-
-
-__global__ void deviceReduceBlockAtomicKernel(float* A, float* B, int N,
-					      double* A_l1, double* A_l2,
-					      double* diff_l1, double* diff_l2) {
-
-  double sum_A_l1 = double(0);
-  double sum_A_l2 = double(0);
-  double sum_diff_l1 = double(0);
-  double sum_diff_l2 = double(0);
-
-  for(int i = blockIdx.x * blockDim.x + threadIdx.x; i < N; i += blockDim.x * gridDim.x) {
-
-    sum_A_l1 += fabsf(A[i]);
-    sum_A_l2 += (A[i] * A[i]);
-    double diff1 = A[i] - B[i];
-    sum_diff_l1 += fabsf(diff1);
-    double diff2 = diff1 * diff1;
-    sum_diff_l2 += diff2;
-  }
-
-  sum_A_l1 = blockReduceSum(sum_A_l1);
-  sum_A_l2 = blockReduceSum(sum_A_l2);
-  sum_diff_l1 = blockReduceSum(sum_diff_l1);
-  sum_diff_l2 = blockReduceSum(sum_diff_l2);
-  
-  if (threadIdx.x == 0){
-    atomicAdd(A_l1, sum_A_l1);
-    atomicAdd(A_l2, sum_A_l2);
-    atomicAdd(diff_l1, sum_diff_l1);
-    atomicAdd(diff_l2, sum_diff_l2);
-  }   
-}
-
-
-void deviceReduce(float* A, float* B, int N,
-		  double* A_l1, double* A_l2,
-		  double* diff_l1, double* diff_l2) {
-
-  int threads = 512;
-  int blocks = min((N + threads - 1) / threads, 1024);
-
-  deviceReduceBlockAtomicKernel<<<blocks, threads>>>(A, B, N, A_l1, A_l2, diff_l1, diff_l2);
-  //-- deviceReduceKernel<<<1, 1024>>>(out, out, blocks);
-}
-
-
-
-// Compute Norms on the GPU
-Norm_t* calculateNormsTreeReduction(Tensor* x, Tensor* x_orig){
-
-  hostToDeviceCopy(x);
-  hostToDeviceCopy(x_orig);
-
-  // FIXIT: Move all floats to doubles - overflow is possible
-  double l1_norm_A;
-  double l2_norm_A;
-
-  double l1_diff;
-  double l2_diff;
-
-  // Device pointers
-  double *l1_norm_A_d;
-  double *l2_norm_A_d;
-  double *l1_diff_d;
-  double *l2_diff_d;
-  
-  cudaMalloc( (void**) &l1_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l2_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l1_diff_d, sizeof(double));
-  cudaMalloc( (void**) &l2_diff_d, sizeof(double));
-
-  // Zero-initialize the device accumulators before the atomicAdd-based reduction
-  cudaMemset(l1_norm_A_d, 0, sizeof(double));
-  cudaMemset(l2_norm_A_d, 0, sizeof(double));
-  cudaMemset(l1_diff_d, 0, sizeof(double));
-  cudaMemset(l2_diff_d, 0, sizeof(double));
- 
-    
-  float* arr1 = (float*) x->gpu_data;
-  float* arr2 = (float*) x_orig->gpu_data;
-
-  //normComputeKernel<<<gridSize, blockSize>>>(arr1, arr2, l1_norm_A_d, l2_norm_A_d, l1_diff_d, l2_diff_d, x->num_elems);
-  deviceReduce(arr1, arr2, x->num_elems, l1_norm_A_d, l2_norm_A_d, l1_diff_d, l2_diff_d);
-  
-  cudaMemcpy(&l1_norm_A, l1_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_norm_A, l2_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l1_diff, l1_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_diff, l2_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-
-  INFO("l1_norm_A = %f, l2_norm_A = %f, l1_diff = %f, l2_diff = %f \n",
-       l1_norm_A, l2_norm_A,l1_diff, l2_diff);
-
-  // Relative L1 and Mean L1 norms of the difference Matrix
-  float mean_l1 = l1_diff / x->num_elems;
-  float relative_l1 = l1_diff / l1_norm_A;
-
-  // Computing Relative L2 norm - i.e., Euclidean distance
-  double norm_root_A = sqrt(l2_norm_A);
-  double diff_root = sqrt(l2_diff);
-  float mean_l2 = diff_root / x->num_elems;
-  float relative_l2 = diff_root / norm_root_A;
-
-  // Packing computed norms in Norm_t struct
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  // Mean metrics - not normalized for the distribution - suitable for precision tuning hardware
-  norms->mean_l1 = mean_l1;
-  norms->mean_l2 = mean_l2;
-  norms->orig_inf_norm = 0.0;
-
-  // Relative metrics (relative to distribution) 
-  norms->l1_norm = relative_l1;
-  norms->l2_norm = relative_l2;
-  norms->inf_norm = 0.0;  
-  
-  INFO("l1_norm = %f \n", relative_l1);
-  INFO("l2_norm = %f \n", relative_l2);
-
-  return norms;
-}
-
-
-
-
-// Compute Norms on the GPU
-Norm_t* calculateNormsGPU(Tensor* x, Tensor* x_orig){
-
-  hostToDeviceCopy(x);
-  hostToDeviceCopy(x_orig);
-
-  // FIXIT: Move all floats to doubles - overflow is possible
-  
-  double l1_norm_A;
-  double l2_norm_A;
-
-  double l1_diff;
-  double l2_diff;
-
-  // Device pointers
-  double *l1_norm_A_d;
-  double *l2_norm_A_d;
-  double *l1_diff_d;
-  double *l2_diff_d;
-  
-  cudaMalloc( (void**) &l1_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l2_norm_A_d, sizeof(double));
-  cudaMalloc( (void**) &l1_diff_d, sizeof(double));
-  cudaMalloc( (void**) &l2_diff_d, sizeof(double));
-
-  // Zero-initialize the device accumulators before the atomicAdd-based kernel
-  cudaMemset(l1_norm_A_d, 0, sizeof(double));
-  cudaMemset(l2_norm_A_d, 0, sizeof(double));
-  cudaMemset(l1_diff_d, 0, sizeof(double));
-  cudaMemset(l2_diff_d, 0, sizeof(double));
- 
-    
-  float* arr1 = (float*) x->gpu_data;
-  float* arr2 = (float*) x_orig->gpu_data;
-
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  normComputeKernel<<<gridSize, blockSize>>>(arr1, arr2, l1_norm_A_d, l2_norm_A_d, l1_diff_d, l2_diff_d, x->num_elems);
-
-  cudaMemcpy(&l1_norm_A, l1_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_norm_A, l2_norm_A_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l1_diff, l1_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-  cudaMemcpy(&l2_diff, l2_diff_d, sizeof(double), cudaMemcpyDeviceToHost);
-  
-
-  // Relative L1 and Mean L1 norms of the difference Matrix
-  float mean_l1 = l1_diff / x->num_elems;
-  float relative_l1 = l1_diff / l1_norm_A;
-
-  // Computing Relative L2 norm - i.e., Euclidean distance
-  double norm_root_A = sqrt(l2_norm_A);
-  double diff_root = sqrt(l2_diff);
-  float mean_l2 = diff_root / x->num_elems;
-  float relative_l2 = diff_root / norm_root_A;
-
-  // Packing computed norms in Norm_t struct
-  Norm_t* norms = (Norm_t*) malloc(sizeof(Norm_t));
-  // Mean metrics - not normalized for the distribution - suitable for precision tuning hardware
-  norms->mean_l1 = mean_l1;
-  norms->mean_l2 = mean_l2;
-  norms->orig_inf_norm = 0.0;
-
-  // Relative metrics (relative to distribution) - suitable for PROMISE
-  norms->l1_norm = relative_l1;
-  norms->l2_norm = relative_l2;
-  norms->inf_norm = 0.0;  
-  
-  INFO("l1_norm = %f \n", relative_l1);
-  INFO("l2_norm = %f \n", relative_l2);
-
-  return norms;
-}
-
-
-
-
-__global__ void vecConstMul(float* A, float mul_factor, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = A[id] * mul_factor; 
-}
-
-
-__global__ void vecRound(float* A, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = roundf(A[id]); 
-}
-
-
-__global__ void vecConstDiv(float* A, float div_factor, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    A[id] = A[id] / div_factor; 
-}
-
-
-
-__global__ void vecMul(float* A, float* B, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n)
-    B[id] = A[id] * B[id]; 
-}
-
-
-/****  ERROR injection routines  ******/
-
-void initRandValues(Tensor* bias, int error_scale){
-
-  float scaling_values[20];
-  
-  // FIXIT: Error knob 0 should be zero error
-  scaling_values[0] = 0.000;
-  scaling_values[1] = 0.0005;
-  scaling_values[2] = 0.03;
-  scaling_values[3] = 0.06;
-  scaling_values[4] = 0.08;
-  scaling_values[5] = 0.105;  
-  scaling_values[6] = 0.134;
-  scaling_values[7] = 0.16;
-  scaling_values[8] = 0.2;
-  scaling_values[9] = 0.23;
-  scaling_values[10] = 0.26;
-  scaling_values[11] = 0.3;
-  scaling_values[12] = 0.35;
-  scaling_values[13] = 0.4;
-  scaling_values[14] = 0.45;
-  scaling_values[15] = 0.55;
-  scaling_values[16] = 0.65;
-  scaling_values[17] = 0.7;
-  scaling_values[18] = 0.8;
-  scaling_values[19] = 0.9;
- 
-
-  curandGenerator_t gen;
-
-  struct timespec ts;
-
-  if(timespec_get(&ts, TIME_UTC) == 0){
-    printf("crashed \n");
-    abort();
-  }
-
-  curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
-
-  curandSetPseudoRandomGeneratorSeed(gen, ts.tv_nsec^ts.tv_sec);
-    
-  curandGenerateNormal(gen, (float*) bias->gpu_data, bias->num_elems, 0.0, 1.0 * scaling_values[error_scale]);
-
-}
-
-
-
-void initRandValues2(Tensor* bias, int error_scale){
-
-  float scaling_values[20];
-  
-  // FIXIT: Error knob 0 should be zero error
-  scaling_values[0] = 0.000;
-  scaling_values[1] = 0.0005;
-  scaling_values[2] = 0.0008;
-  scaling_values[3] = 0.001;
-  scaling_values[4] = 0.005;
-  scaling_values[5] = 0.01;  
-  scaling_values[6] = 0.02;
-  scaling_values[7] = 0.03;
-  scaling_values[8] = 0.04;
-  scaling_values[9] = 0.05;
-  scaling_values[10] = 0.06;
-  scaling_values[11] = 0.08;
-  scaling_values[12] = 0.1;
-  scaling_values[13] = 0.12;
-  scaling_values[14] = 0.15;
-  scaling_values[15] = 0.2;
-  scaling_values[16] = 0.55;
-  scaling_values[17] = 0.6;
-  scaling_values[18] = 0.65;
-  scaling_values[19] = 0.7;
-
-
-  curandGenerator_t gen;
-
-  struct timespec ts;
-
-  if(timespec_get(&ts, TIME_UTC) == 0){
-    printf("crashed \n");
-    abort();
-  }
-
-  curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
-
-  curandSetPseudoRandomGeneratorSeed(gen, ts.tv_nsec^ts.tv_sec);
-    
-  curandGenerateNormal(gen, (float*) bias->gpu_data, bias->num_elems, 0.0, 1.0 * scaling_values[error_scale]);
-
-}
-
-
-void* addBitError(void* x_ptr, int error_scale){
-
-  if(error_scale > 5 || error_scale < 0){
-    ERROR("Error Scale out of bounds - addBitError has 6 frequency factors (0-5) \n");
-  }
-      
-  INFO("*** TensorBitError \n");  
-  profileEvent("tensorBitError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes; 
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  // Quadratic Error
-  float freq_factors[6];
-  freq_factors[0] = 0.1;
-  freq_factors[1] = 0.2;
-  freq_factors[2] = 0.4;
-  freq_factors[3] = 0.6;
-  freq_factors[4] = 0.8;
-  freq_factors[5] = 1.0;
-
-  float error_freq = freq_factors[error_scale];
-  
-  deviceToHostCopy(x);
-
-  unsigned char* data_arr = reinterpret_cast<unsigned char*>(x->host_data);
-  // FIXIT: Need to be careful about floating point datatype assumptions
-  long int total_bytes = x->size_in_bytes;
-  long int error_iterations = total_bytes * 0.01 * error_freq;
-  INFO("total_bytes = %ld, error_iterations = %ld \n", total_bytes, error_iterations);
-
-  srand(time(NULL));
-  
-  for(long int i = 0; i < error_iterations; i++){
-    // FIXIT: rand() only covers the int range - need a long random index
-    long int index = rand() % total_bytes;
-    int N = 5; // Flip the Nth bit of the randomly chosen byte
-    unsigned char fil = 1UL << N;
-    data_arr[index] = data_arr[index] ^ fil;
-  }
-  
-
-  Norm_t* norms = calculateNorms2(x, x_original);
-
-  
-  profileEvent("tensorBitError_end", true);
-  
-  return (void*) norms;
-
-}
-
-
-void randomCeilAndFloor(float* x, size_t num_elems){
-
-  INFO("randomCeilAndFloor\n");
-  
-  std::random_device rd;
-  std::mt19937 mt(rd());
-  std::normal_distribution<float> distribution(0.0, 1.0);
-
-  for(size_t i = 0; i < num_elems; i++){
-    float rand_num = distribution(mt);
-    int val = abs(((int) rand_num) % 2);
-    if(val == 0)
-      x[i] = floor(x[i]);
-    else if(val == 1)
-      x[i] = ceil(x[i]);
-  }
-
-}
-
-// Routine for Adding RoundOff Errors
-void* addRoundError(void* x_ptr, int error_scale){
-
-  if(error_scale > 11 || error_scale < 0){
-    ERROR("Error Scale out of bounds \n");
-  }
-      
-  INFO("*** TensorRoundError \n");  
-  profileEvent("tensorRoundError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes; 
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  float round_factors[12];
-  round_factors[0] = 1000000; // FIXIT: This should be zero error
-  round_factors[1] = 100;
-  round_factors[2] = 10;
-  round_factors[3] = 7; // Beyond this point, the error function is linear
-  round_factors[4] = 3;
-  round_factors[5] = 1;
-  round_factors[6] = 0.7;
-  round_factors[7] = 0.3;
-  round_factors[8] = 0.1;
-  round_factors[9] = 0.07;
-  round_factors[10] = 0.03;
-  round_factors[11] = 0.01;
-  
-  // THINK: Considering using error magnitudes in this scenario
-  
-
-  float round_factor = round_factors[error_scale];
-  INFO("round_factor = %f \n", round_factor);
-  
-  hostToDeviceCopy(x);
-
-  int blockSize = 128;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  // NOTE: Check if a large gridSize will work with really large tensors
-  vecConstMul<<<gridSize, blockSize>>>((float*) x->gpu_data, round_factor, x->num_elems);
-  //vecRound<<<gridSize, blockSize>>>((float*) x->gpu_data, x->num_elems);
-  
-  deviceToHostCopy(x);
-  randomCeilAndFloor((float*) x->host_data, x->num_elems);
-  hostToDeviceCopy(x);
-  
-  vecConstDiv<<<gridSize, blockSize>>>((float*) x->gpu_data, round_factor, x->num_elems);
-  
-  Norm_t* norms = calculateNorms2(x, x_original);
-  
-  profileEvent("tensorRoundError_end", true);
-  
-  return (void*) norms;
-}
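-
-// Worked example of the round-off perturbation (illustrative numbers): with
-// round_factor = 10, x = 3.14159 is scaled to 31.4159, randomly floored or
-// ceiled to 31 or 32, then scaled back to 3.1 or 3.2 - i.e. a per-element
-// error of at most 1 / round_factor = 0.1.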
-
-
-
-
-// Routine for Adding Gaussian Error
-void* addGaussianError(void* x_ptr, int error_scale){
-
-  if(error_scale > 19 || error_scale < 0){
-    ERROR("Error Scale out of bounds - addGaussianError has 20 scaling values (0-19) \n");
-  }
-      
-  INFO("*** TensorAddError \n");  
-  profileEvent("tensorAddError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes;
-  Tensor* bias = (Tensor*) create4DTensor(x->cur_type, x->data_format,
-					  dim_sizes[0], dim_sizes[1],
-					  dim_sizes[2], dim_sizes[3]);
-  
-  Tensor* x_original = (Tensor*) create4DTensor(x->cur_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original - for computing Norms 
-  tensorCopy(x, x_original);
-
-  // NOTE: Error scale is used to generate the bias matrix
-  initRandValues(bias, error_scale);  
-
-  hostToDeviceCopy(x);
-  //hostToDeviceCopy(bias);
-
- 
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  // NOTE: Check if a large gridSize will work with really large tensors
-  vecMul<<<gridSize, blockSize>>>((float*) x->gpu_data, (float*) bias->gpu_data, x->num_elems);
-  
-  float alpha = 1.0f;
-    
-  // FIXIT: routine fails for 3D tensors
-  checkCUDNN(cudnnAddTensor(cudnnHandle, &alpha, bias->tensor_desc,
-			    bias->gpu_data, &alpha, x->tensor_desc, x->gpu_data));
-
-
-  //Norm_t* norms = calculateNorms2(x, x_original);
-  //Norm_t* norms = calculateNormsGPU(x, x_original);
-
-  Norm_t* norms = calculateNormsTreeReduction(x, x_original);
-  
-  freeTensor(x_original);
-  freeTensor(bias);
-  
-  
-  profileEvent("tensorAddError_end", true);
-  
-  return (void*) norms;
-}
-
-
-
-void initPromiseRandValues(Tensor* bias, int error_scale){
-
-  float scaling_values[10];
-
-  // FIXIT: Error knob 0 should be zero error
-  scaling_values[0] = 0.75;
-  scaling_values[1] = 0.64;
-  scaling_values[2] = 0.336;
-  scaling_values[3] = 0.21;
-  scaling_values[4] = 0.168;
-  scaling_values[5] = 0.14;  
-  scaling_values[6] = 0.11;
-  scaling_values[7] = 0.0784;
-  scaling_values[8] = 0.005;
-  scaling_values[9] = 0.000;
-
-  
-  curandGenerator_t gen;
-  struct timespec ts;
-  if(timespec_get(&ts, TIME_UTC) == 0){
-    printf("crashed \n");
-    abort();
-  }
-
-  curandCreateGenerator(&gen, CURAND_RNG_PSEUDO_DEFAULT);
-  curandSetPseudoRandomGeneratorSeed(gen, ts.tv_nsec^ts.tv_sec);
-  curandGenerateNormal(gen, (float*) bias->gpu_data, bias->num_elems, 0.0, 1.0 * scaling_values[error_scale]);
-  
-}
-
-
-// NOTE: Assumption is that x_ptr is FP32 tensor - doesn't work with FP16
-// Routine for Adding PROMISE bitline swing error
-void* addPromiseError(void* x_ptr, int error_scale){
-
-  if(error_scale > 9 || error_scale < 0){
-    ERROR("Error Scale out of bounds for PROMISE - 10 scaling values (0-9) \n");
-  }
-      
-  INFO("*** addPromiseError \n");  
-  profileEvent("addPromiseError");
-
-  Tensor* x = (Tensor*) x_ptr;
-  
-  size_t* dim_sizes = x->dims.dim_sizes;
-  Tensor* bias = (Tensor*) create4DTensor(x->cur_type, x->data_format,
-					  dim_sizes[0], dim_sizes[1],
-					  dim_sizes[2], dim_sizes[3]);
- 
-  // NOTE: Error scale is used to generate the bias matrix
-  initPromiseRandValues(bias, error_scale);  
-
-  hostToDeviceCopy(x);
-  //hostToDeviceCopy(bias);
- 
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) x->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  // NOTE: Check if a large gridSize will work with really large tensors
-  vecMul<<<gridSize, blockSize>>>((float*) x->gpu_data, (float*) bias->gpu_data, x->num_elems);
-  
-  float alpha = 1.0f;
-  //float beta = 0.0f;    
-  checkCUDNN(cudnnAddTensor(cudnnHandle, &alpha, bias->tensor_desc,
-			    bias->gpu_data, &alpha, x->tensor_desc, x->gpu_data));
-
-  profileEvent("addPromiseError_end", true);
-  
-  return (void*) x;
-}
-
-
-
-
-__global__ void quantizeAndClip(float* A, int n, float mul_factor, float min, float max){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-  if(id < n){
-    int temp = (A[id] - min) / mul_factor;
-    float result = temp * 1.0 * mul_factor;
-    result = result + min;
-    A[id] = result;
-
-    if(A[id] > max){
-      A[id] = max;
-    }
-    if(A[id] < min){
-      A[id] = min;
-    }
-    
-  }
-}
-
-
-__global__ void quantizeElem(float* A, int n, float mul_factor, float min){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-  if(id < n){
-    int temp = (A[id] - min) / mul_factor;
-    float result = temp * 1.0 * mul_factor;
-    result = result + min;
-    A[id] = result;
-  }
-}
-
-
-void* quantizeTensorPromise(void* input_ptr, float min, float max){
-
-  INFO("QuantizeTensorPROMISE \n");
-  Tensor* input = (Tensor*) input_ptr;
-
-  
-  int quantize_range = 256;
-  float input_range = max - min;
-  float mul_factor = input_range / quantize_range;
-  INFO("mul_factor = %f \n", mul_factor);
-
-  int blockSize = 1024;
-  int gridSize = (int) ceil ((float) input->num_elems / blockSize);
-  INFO("blockSize = %d, gridSize = %d \n", blockSize, gridSize);
-
-  hostToDeviceCopy(input);
-
-  quantizeAndClip<<<gridSize, blockSize>>>((float*) input->gpu_data,
-					   input->num_elems, mul_factor, min, max);
-
-  
-  return input;
-}
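-
-// Worked example (illustrative numbers): with min = -2 and max = 6, the input
-// range of 8 is split into 256 steps of mul_factor = 0.03125. An element 1.17
-// maps to step (1.17 - (-2)) / 0.03125 = 101 (truncated), is reconstructed as
-// 101 * 0.03125 + (-2) = 1.15625, and is then clipped to [-2, 6].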
-
-
-void* tensorAddError(void* x_ptr, int error_scale){
-
-  void * new_x = addGaussianError(x_ptr, error_scale);
-  //void * new_x = addRoundError(x_ptr, error_scale);
-  //void * new_x = addBitError(x_ptr, error_scale);
-  return new_x;
-}
-
-
-
-/*void* tensorGemmModel(void* lhs_ptr, void* rhs_ptr){
-
-  void* gold_output = tensorGemmGPU(lhs_ptr, rhs_ptr);
-  
-  Tensor* x = (Tensor*) lhs_ptr;
-  size_t* dim_sizes = x->dims.dim_sizes; 
-
-  Tensor* x_original = (Tensor*) create4DTensor(x->data_type, x->data_format,
-					        dim_sizes[0], dim_sizes[1],
-						dim_sizes[2], dim_sizes[3]);
-
-  // Copying x data into x_original  
-  tensorCopy(x, x_original);
-
-  // NOTE: creating result tensor allocation that is to be reused
-  void* gemm_result = tensorGemmGPU((void*)x, rhs_ptr);
- 
-  int error_level1 = 5;
-  int error_level2 = 5;
-  
-  for(int i = 0; i < error_level1; i++){
-    Norm_t* x_norms = (Norm_t*) tensorAddError(x, 2);
-
-    for(int j = 0; j < error_level2; j++){
-      // NOTE: Reusing the result tensor - to keep memory usage in limits
-      void* gemm_result = tensorGemmGPU((void*)x, rhs_ptr, gemm_result);
-      Norm_t* compute_norms = (Norm_t*) tensorAddError(gemm_result, 2);
-      Norm_t* output_norms = calculateNormsTreeReduction((Tensor*) gemm_result, (Tensor*) gold_output);
-      double l1_norm = output_norms->l1_norm;
-      double l2_norm = output_norms->l2_norm;
-      printf("*********** l1_norm = %f, l2_norm = %f \n\n", l1_norm, l2_norm);
-    }
-    // Copying x data into x_original  
-    tensorCopy(x_original, x);
-  }
-  
-
-  return gold_output;
-}
-*/
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_conversion.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_conversion.h
deleted file mode 100644
index 4c2fbe806d1758118f6d55c079f9c75de42599d8..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_conversion.h
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright (c) 1993-2016, NVIDIA CORPORATION. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//  * Redistributions of source code must retain the above copyright
-//    notice, this list of conditions and the following disclaimer.
-//  * Redistributions in binary form must reproduce the above copyright
-//    notice, this list of conditions and the following disclaimer in the
-//    documentation and/or other materials provided with the distribution.
-//  * Neither the name of NVIDIA CORPORATION nor the names of its
-//    contributors may be used to endorse or promote products derived
-//    from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
-// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-// PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR
-// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// This code modified from the public domain code here: 
-// https://gist.github.com/rygorous/2156668
-// The URL above includes more robust conversion routines
-// that handle Inf and NaN correctly. 
-// 
-// It is recommended to use the more robust versions in production code.
-
-
-#ifndef FP16_CONV_HEADER
-#define FP16_CONV_HEADER
-
-
-
-typedef unsigned uint;
-
-union FP32
-{
-    uint u;
-    float f;
-    struct
-    {
-        uint Mantissa : 23;
-        uint Exponent : 8;
-        uint Sign : 1;
-    };
-};
-
-union FP16
-{
-    unsigned short u;
-    struct
-    {
-        uint Mantissa : 10;
-        uint Exponent : 5;
-        uint Sign : 1;
-    };
-};
-
-// Approximate solution. This is faster but converts some sNaNs to
-// infinity and doesn't round correctly. Handle with care.
-static half approx_float_to_half(float fl)
-{
-    FP32 f32infty = { 255 << 23 };
-    FP32 f16max = { (127 + 16) << 23 };
-    FP32 magic = { 15 << 23 };
-    FP32 expinf = { (255 ^ 31) << 23 };
-    uint sign_mask = 0x80000000u;
-    FP16 o = { 0 };
-
-    FP32 f = *((FP32*)&fl);
-
-    uint sign = f.u & sign_mask;
-    f.u ^= sign;
-
-    if (!(f.f < f32infty.u)) // Inf or NaN
-        o.u = f.u ^ expinf.u;
-    else
-    {
-        if (f.f > f16max.f) f.f = f16max.f;
-        f.f *= magic.f;
-    }
-
-    o.u = f.u >> 13; // Take the mantissa bits
-    o.u |= sign >> 16;
-    return *((half*)&o);
-}
-
-// from half->float code - just for verification.
-static float half_to_float(half hf)
-{
-    FP16 h = *((FP16*)&hf);
-
-    static const FP32 magic = { 113 << 23 };
-    static const uint shifted_exp = 0x7c00 << 13; // exponent mask after shift
-    FP32 o;
-
-    o.u = (h.u & 0x7fff) << 13;     // exponent/mantissa bits
-    uint exp = shifted_exp & o.u;   // just the exponent
-    o.u += (127 - 15) << 23;        // exponent adjust
-
-    // handle exponent special cases
-    if (exp == shifted_exp) // Inf/NaN?
-        o.u += (128 - 16) << 23;    // extra exp adjust
-    else if (exp == 0) // Zero/Denormal?
-    {
-        o.u += 1 << 23;             // extra exp adjust
-        o.f -= magic.f;             // renormalize
-    }
-
-    o.u |= (h.u & 0x8000) << 16;    // sign bit
-    return o.f;
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_emu.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_emu.h
deleted file mode 100644
index 64aee8231b54d52710192fc7d598d6ed162f1338..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_emu.h
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright 1993-2014 NVIDIA Corporation.  All rights reserved.
- *
- * NOTICE TO LICENSEE:
- *
- * This source code and/or documentation ("Licensed Deliverables") are
- * subject to NVIDIA intellectual property rights under U.S. and
- * international Copyright laws.
- *
- * These Licensed Deliverables contained herein is PROPRIETARY and
- * CONFIDENTIAL to NVIDIA and is being provided under the terms and
- * conditions of a form of NVIDIA software license agreement by and
- * between NVIDIA and Licensee ("License Agreement") or electronically
- * accepted by Licensee.  Notwithstanding any terms or conditions to
- * the contrary in the License Agreement, reproduction or disclosure
- * of the Licensed Deliverables to any third party without the express
- * written consent of NVIDIA is prohibited.
- *
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
- * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE.  IT IS
- * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
- * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
- * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
- * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
- * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
- * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
- * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
- * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
- * OF THESE LICENSED DELIVERABLES.
- *
- * U.S. Government End Users.  These Licensed Deliverables are a
- * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
- * 1995), consisting of "commercial computer software" and "commercial
- * computer software documentation" as such terms are used in 48
- * C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government
- * only as a commercial end item.  Consistent with 48 C.F.R.12.212 and
- * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
- * U.S. Government End Users acquire the Licensed Deliverables with
- * only those rights set forth herein.
- *
- * Any use of the Licensed Deliverables in individual and commercial
- * software must include, in the user documentation and internal
- * comments to the code, the above Disclaimer and U.S. Government End
- * Users Notice.
- */
-
-// Conversion from/to 16-bit floating point (half-precision).
-
-#if !defined(_FP16_EMU_H_)
-#define _FP16_EMU_H_
-
-#include <driver_types.h>
-#include <cuda_fp16.h>
-
-// Necessary to ensure visibility of CUDART_VERSION macro
-#include <cuda_runtime_api.h>
-
-// Definition of '__half_raw' was not provided before CUDA 9.0.
-// '__half_raw' is our type where the unsigned 16-bit integer 
-// data member 'x' can be accessed in both CUDA 9.0 and 8.0.
-#if CUDART_VERSION < 9000 
-typedef __half __half_raw;
-#endif
-
-// Internally, in CUDNN we use half1 struct as the FP16 type.
-typedef __half half1;
-
-#define HLF_EPSILON 4.887581E-04
-#define HLF_MIN     6.103516E-05
-#define HLF_MAX     6.550400E+04
-
-half1 cpu_float2half_rn(float f);
-
-float cpu_half2float(half1 h);
-
-static __inline__ __device__ __host__ half1 habs(half1 h)
-{
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    hr.x &= 0x7fffU;
-    return reinterpret_cast<half1&>(hr);
-}
-
-static __inline__ __device__ __host__ half1 hneg(half1 h)
-{
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    hr.x ^= 0x8000U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-static __inline__ __device__ __host__ int ishnan(half1 h)
-{
-    // When input is NaN, exponent is all ones and mantissa is non-zero.
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    return (hr.x & 0x7c00U) == 0x7c00U && (hr.x & 0x03ffU) != 0;
-}
-
-static __inline__ __device__ __host__ int ishinf(half1 h)
-{
-    // When input is +/- inf, exponent is all ones and mantissa is zero.
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    return (hr.x & 0x7c00U) == 0x7c00U && (hr.x & 0x03ffU) == 0;
-}
-
-static __inline__ __device__ __host__ int ishequ(half1 x, half1 y)
-{
-    __half_raw xr = reinterpret_cast<__half_raw&>(x);
-    __half_raw yr = reinterpret_cast<__half_raw&>(y);
-    return ishnan(x) == 0 && ishnan(y) == 0 && xr.x == yr.x;
-}
-
-// Returns 0.0000 in FP16 binary form
-static __inline__ __device__ __host__ half1 hzero()
-{
-    __half_raw hr;
-    hr.x = 0x0000U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Returns 1.0000 in FP16 binary form
-static __inline__ __device__ __host__ half1 hone()
-{
-    __half_raw hr;
-    hr.x = 0x3c00U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Returns quiet NaN, the most significant fraction bit #9 is set
-static __inline__ __device__ __host__ half1 hnan()
-{
-    __half_raw hr;
-    hr.x = 0x7e00U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Largest positive FP16 value, corresponds to 6.5504e+04
-static __inline__ __device__ __host__ half1 hmax()
-{
-    // Exponent all ones except LSB (0x1e), mantissa is all ones (0x3ff)
-    __half_raw hr;
-    hr.x = 0x7bffU;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Smallest positive (normalized) FP16 value, corresponds to 6.1035e-05
-static __inline__ __device__ __host__ half1 hmin()
-{
-    // Exponent is 0x01 (5 bits), mantissa is all zeros (10 bits)
-    __half_raw hr;
-    hr.x = 0x0400U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-
-
-
-
-
-
-
-
-
-
-#define STATIC_ASSERT(cond) do { typedef char compile_time_assert[(cond) ? 1 : -1]; } while (0)
-
-// Host functions for converting between FP32 and FP16 formats
-// Paulius Micikevicius (pauliusm@nvidia.com)
-
-half1 cpu_float2half_rn(float f)
-{
-    unsigned x = *((int*)(void*)(&f));
-    unsigned u = (x & 0x7fffffff), remainder, shift, lsb, lsb_s1, lsb_m1;
-    unsigned sign, exponent, mantissa;
-
-    __half_raw hr;
-
-    // Get rid of +NaN/-NaN case first.
-    if (u > 0x7f800000) {
-        hr.x = 0x7fffU;
-        return reinterpret_cast<half1&>(hr);
-    }
-  
-    sign = ((x >> 16) & 0x8000);
-  
-    // Get rid of +Inf/-Inf, +0/-0.
-    if (u > 0x477fefff) {
-        hr.x = sign | 0x7c00U;
-        return reinterpret_cast<half1&>(hr);
-    }
-    if (u < 0x33000001) {
-        hr.x = sign | 0x0000U;
-        return reinterpret_cast<half1&>(hr);
-    }
-
-    exponent = ((u >> 23) & 0xff);
-    mantissa = (u & 0x7fffff);
-
-    if (exponent > 0x70) {
-        shift = 13;
-        exponent -= 0x70;
-    } else {
-        shift = 0x7e - exponent;
-        exponent = 0;
-        mantissa |= 0x800000;
-    }
-    lsb = (1 << shift);
-    lsb_s1 = (lsb >> 1);
-    lsb_m1 = (lsb - 1);
-  
-    // Round to nearest even.
-    remainder = (mantissa & lsb_m1);
-    mantissa >>= shift;
-    if (remainder > lsb_s1 || (remainder == lsb_s1 && (mantissa & 0x1))) {
-        ++mantissa;
-        if (!(mantissa & 0x3ff)) {
-            ++exponent;
-            mantissa = 0;
-        }
-    }  
-
-    hr.x = (sign | (exponent << 10) | mantissa);  
-
-    return reinterpret_cast<half1&>(hr);
-}
-
-
-float cpu_half2float(half1 h)
-{
-    STATIC_ASSERT(sizeof(int) == sizeof(float));
-
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-
-    unsigned sign     = ((hr.x >> 15) & 1);
-    unsigned exponent = ((hr.x >> 10) & 0x1f);
-    unsigned mantissa = ((hr.x & 0x3ff) << 13);
-
-    if (exponent == 0x1f) {  /* NaN or Inf */
-        mantissa = (mantissa ? (sign = 0, 0x7fffff) : 0);
-        exponent = 0xff;
-    } else if (!exponent) {  /* Denorm or Zero */
-        if (mantissa) {
-            unsigned int msb;
-            exponent = 0x71;
-            do {
-                msb = (mantissa & 0x400000);
-                mantissa <<= 1;  /* normalize */
-                --exponent;
-            } while (!msb);
-            mantissa &= 0x7fffff;  /* 1.mantissa is implicit */
-        }
-    } else {
-        exponent += 0x70;
-    }
-
-    int temp = ((sign << 31) | (exponent << 23) | mantissa);
-
-    return reinterpret_cast<float&>(temp);
-}
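-
-// Round-trip sketch (illustrative): FP16 has a 10-bit mantissa, so a
-// float -> half -> float round trip with round-to-nearest loses precision
-// bounded by HLF_EPSILON relative error:
-//
-//   float x = 3.14159f;
-//   half1 h = cpu_float2half_rn(x);
-//   float y = cpu_half2float(h);   // y == 3.140625f
-//   assert(fabsf(y - x) / x <= HLF_EPSILON);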
-
-
-
-
-
-
-
-#endif  // _FP16_EMU_H_
-
-
-
-
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_gemm.cu b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_gemm.cu
deleted file mode 100644
index 0649b5f58b7044cc9982e24975006c6d3cb35565..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/fp16_gemm.cu
+++ /dev/null
@@ -1,279 +0,0 @@
-
-
-#ifndef FP16_UTILS_HEADER
-#define FP16_UTILS_HEADER
-
-#include <iostream>
-#include <string>
-#include <cublas_v2.h>
-#include <cuda_fp16.h>
-#include "fp16_emu.h"
-
-inline cudaError_t checkCuda(cudaError_t result) {
-    if (result != cudaSuccess)
-        std::cerr << "CUDA Runtime Error: " << cudaGetErrorString(result) << "\n";
-    return result;
-}
-
-inline cublasStatus_t checkCublas(cublasStatus_t result) {
-    if (result != CUBLAS_STATUS_SUCCESS)
-        std::cerr << "cuBLAS Error: " << result << "\n";
-    return result;
-}
-
-template <typename T>
-inline void printArray(const T * const __restrict__ array,
-                       const unsigned elements) {
-    for (unsigned i = 0; i < elements; i++)
-        std::cout << std::to_string(array[i]) << "\n";
-}
-
-// initialization
-template <typename T>
-__global__ void initKernel(T * const __restrict__ array,
-                           const unsigned elements) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        array[idx] = 1.2;
-}
-
-template <typename T>
-void init(T * const __restrict__ array,
-          const unsigned elements) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    initKernel<<<num_blocks, block_size>>>(array, elements);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// float to half
-__global__ void f2hKernel(const float * const __restrict__ input,
-                          const unsigned elements,
-                          half * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __float2half_rn(input[idx]);
-}
-
-void f2h(const float * const __restrict__ input,
-         const unsigned elements,
-         half * const __restrict__ output) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    f2hKernel<<<num_blocks, block_size>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// half to float
-__global__ void h2fKernel(const half * const __restrict__ input,
-                          const unsigned elements,
-                          float * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __half2float(input[idx]);
-}
-
-void h2f(const half * const __restrict__ input,
-         const unsigned elements,
-         float * const __restrict__ output) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    h2fKernel<<<num_blocks, block_size>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-void sgemm(const float * const __restrict__ a,
-           const unsigned num_rows_a,
-           const unsigned num_cols_a,
-           const float * const __restrict__ b,
-           const unsigned num_rows_b,
-           const unsigned num_cols_b,
-           float * const __restrict__ c) {
-    const unsigned iterations = 10;
-    float kernel_time;
-    cudaEvent_t start;
-    cudaEvent_t stop;
-    cudaEventCreate(&start);
-    cudaEventCreate(&stop);
-
-    cublasHandle_t handle;
-    checkCublas(cublasCreate(&handle));
-
-    // Enable Tensor Cores
-    checkCublas(cublasSetMathMode(handle, CUBLAS_TENSOR_OP_MATH));
-
-    const float alpha_ = 1.0;
-    const float beta_  = 0.0;
-    const float *alpha = &alpha_;
-    const float *beta  = &beta_;
-
-    cudaEventRecord(start, 0);
-    for (unsigned i = 0; i < iterations; i++) {
-        checkCublas(cublasGemmEx(handle,
-                                 CUBLAS_OP_N,
-                                 CUBLAS_OP_N,
-                                 // Dimensions
-                                 num_rows_a,
-                                 num_cols_b,
-                                 num_cols_a,
-                                 alpha,
-                                 // A
-                                 a,
-                                 CUDA_R_32F,
-                                 num_rows_a,
-                                 // B
-                                 b,
-                                 CUDA_R_32F,
-                                 num_rows_b,
-                                 beta,
-                                 // C
-                                 c,
-                                 CUDA_R_32F,
-                                 num_rows_a,
-                                 // Compute precision and algorithm
-                                 CUDA_R_32F,
-                                 CUBLAS_GEMM_DEFAULT_TENSOR_OP));
-    }
-    cudaEventRecord(stop, 0);
-    cudaEventSynchronize(stop);
-    cudaEventElapsedTime(&kernel_time, start, stop);
-
-    std::cout << "FP32 GEMM: " << std::to_string(kernel_time / iterations) << " ms\n";
-}
-
-void hgemm(const float * const __restrict__ af,
-           const unsigned num_rows_a,
-           const unsigned num_cols_a,
-           const float * const __restrict__ bf,
-           const unsigned num_rows_b,
-           const unsigned num_cols_b,
-           float * const __restrict__ cf) {
-    const unsigned iterations = 10;
-
-    const unsigned num_elements_a = num_rows_a * num_cols_a;
-    const unsigned num_elements_b = num_rows_b * num_cols_b;
-    const unsigned num_elements_c = num_rows_a * num_cols_b;
-
-    float to_fp16_time;
-    float to_fp32_time;
-    float kernel_time;
-    float total_time;
-
-    cudaEvent_t start;
-    cudaEvent_t stop;
-    cudaEventCreate(&start);
-    cudaEventCreate(&stop);
-
-    half *a;
-    half *b;
-    half *c;
-
-    checkCuda(cudaMallocManaged(&a, sizeof(half) * num_elements_a));
-    checkCuda(cudaMallocManaged(&b, sizeof(half) * num_elements_b));
-    checkCuda(cudaMallocManaged(&c, sizeof(half) * num_elements_c));
-
-    init(a, num_elements_a);
-    init(b, num_elements_b);
-    init(c, num_elements_c);
-
-    // Convert floats to halfs
-    cudaEventRecord(start, 0);
-    f2h(af, num_elements_a, a);
-    f2h(bf, num_elements_b, b);
-    cudaEventRecord(stop, 0);
-    cudaEventSynchronize(stop);
-    cudaEventElapsedTime(&to_fp16_time, start, stop);
-
-    cublasHandle_t handle;
-    checkCublas(cublasCreate(&handle));
-    checkCublas(cublasSetMathMode(handle, CUBLAS_TENSOR_OP_MATH));
-
-    const half alpha_ = cpu_float2half_rn(1.0);
-    const half beta_  = cpu_float2half_rn(0.0);
-    const half *alpha = &alpha_;
-    const half *beta  = &beta_;
-
-    cudaEventRecord(start, 0);
-    for (unsigned i = 0; i < iterations; i++) {
-        checkCublas(cublasGemmEx(handle,
-                                 CUBLAS_OP_N,
-                                 CUBLAS_OP_N,
-                                 // Dimensions
-                                 num_rows_a,
-                                 num_cols_b,
-                                 num_cols_a,
-                                 alpha,
-                                 // A
-                                 a,
-                                 CUDA_R_16F,
-                                 num_rows_a,
-                                 // B
-                                 b,
-                                 CUDA_R_16F,
-                                 num_rows_b,
-                                 beta,
-                                 // C
-                                 c,
-                                 CUDA_R_16F,
-                                 num_rows_a,
-                                 // Compute precision and algorithm
-                                 CUDA_R_16F,
-                                 CUBLAS_GEMM_DEFAULT_TENSOR_OP));
-    }
-    cudaEventRecord(stop, 0);
-    cudaEventSynchronize(stop);
-    cudaEventElapsedTime(&kernel_time, start, stop);
-
-    cudaEventRecord(start, 0);
-    h2f(c, num_elements_c, cf);
-    cudaEventRecord(stop, 0);
-    cudaEventSynchronize(stop);
-    cudaEventElapsedTime(&to_fp32_time, start, stop);
-
-    total_time = to_fp16_time + (kernel_time / iterations) + to_fp32_time;
-    std::cout << "FP16 GEMM: " << std::to_string(total_time) << " ms\n";
-    std::cout << "\tTo FP16: " << std::to_string(to_fp16_time) << " ms\n";
-    std::cout << "\tKernel : " << std::to_string(kernel_time / iterations) << " ms\n";
-    std::cout << "\tTo FP32: " << std::to_string(to_fp32_time) << " ms\n";
-}
-
-
-/*int main() {
-    const unsigned num_rows_a = 5000 * 14 * 14;
-    const unsigned num_cols_a = 800;
-    const unsigned num_rows_b = num_cols_a;
-    const unsigned num_cols_b = 64;
-
-    const unsigned num_elements_a = num_rows_a * num_cols_a;
-    const unsigned num_elements_b = num_rows_b * num_cols_b;
-    const unsigned num_elements_c = num_rows_a * num_cols_b;
-
-    float *a;
-    float *b;
-    float *c;
-
-    checkCuda(cudaMallocManaged(&a, sizeof(float) * num_elements_a));
-    checkCuda(cudaMallocManaged(&b, sizeof(float) * num_elements_b));
-    checkCuda(cudaMallocManaged(&c, sizeof(float) * num_elements_c));
-
-    init(a, num_elements_a);
-    init(b, num_elements_b);
-    init(c, num_elements_c);
-
-    // FP32
-    sgemm(a, num_rows_a, num_cols_a, b, num_rows_b, num_cols_b, c);
-    printArray(c, 16);
-
-    // FP16
-    hgemm(a, num_rows_a, num_cols_a, b, num_rows_b, num_cols_b, c);
-    printArray(c, 16);
-
-    checkCuda(cudaFree(a));
-    checkCuda(cudaFree(b));
-    checkCuda(cudaFree(c));
-
-    return 0;
-}
-*/
-
-#endif
\ No newline at end of file
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/global_data.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/global_data.h
deleted file mode 100644
index 230cb31f4de4740428737e52ad2834908566a07b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/global_data.h
+++ /dev/null
@@ -1,61 +0,0 @@
-
-#ifndef GLOBAL_DATA_HEADER
-#define GLOBAL_DATA_HEADER
-
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-
-#include <cuda_runtime.h>
-#include <device_launch_parameters.h>
-
-#include <cublas_v2.h>
-#include <cudnn.h>
-#include <cublas_api.h>
-#include "tensor.h"
-#include <string>
-#include <unordered_map>
-
-#define ERROR_INJECTION_ENABLED 0
-#define PROMISE_MODE 1
-
-
-#ifdef NO_INJECTION
-#undef ERROR_INJECTION_ENABLED
-#endif
-
-
-//#define ERROR_INJECTION_ENABLED 1
-/* Data declarations */
-cudnnHandle_t cudnnHandle;
-cublasHandle_t cublasHandle;
-
-bool runtime_initialized = false;
-// NOTE: True in Layers mode or in the ApproxHPVM wrapper runtime mode
-bool approxhpvm_runtime_mode = false;
-
-
-int op_counter = 0;
-int total_ops = 0;
-// NOTE: Both vectors assume a linear CFG
-// FIXME: Each operation should have an ID passed to the runtime
-std::vector<int> op_accuracies;
-std::vector<Range*> quant_ranges;
-
-std::vector<void*> tensors_ptr;
-std::vector<void*> host_ptr;
-std::vector<void*> obj_ptr;
-
-std::unordered_map<void*, int> tracked_tensors;
-
-// Autotuning data
-std::unordered_map<int, int> skip_tensors;
-
-// Profiling Data
-std::unordered_map<std::string, int> func_counters;
-std::string profile_data = "";
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/half_precision_api.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/half_precision_api.h
deleted file mode 100644
index 94e1a635b5a6baec9fec6c91509caee5cf287e01..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/half_precision_api.h
+++ /dev/null
@@ -1,618 +0,0 @@
-
-
-#ifndef HALF_API_HEADER
-#define HALF_API_HEADER
-
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-#include <cmath>
-#include <ctime>
-#include <cfloat>
-#include <iostream>
-#include <map>
-#include <memory>
-#include <random>
-#include <sstream>
-#include <string>
-#include <vector>
-
-#include <cuda_runtime.h>
-#include <device_launch_parameters.h>
-
-#include <cublas_v2.h>
-#include <cudnn.h>
-#include <cublas_api.h>
-#include <cuda_fp16.h>
-#include <driver_types.h>
-
-
-// Tensor runtime header files
-#include "../include/tensor_runtime.h"
-#include "../include/tensor_utils.cu"
-#include "../include/debug.h"
-#include "../include/profiling.h"
-#include "../include/global_data.h"
-#include "../include/tensor.h"
-#include "../include/fp16_gemm.cu"
-
-
-
-void* tensorHalfGemm(void* lhs_ptr, void* rhs_ptr){
-
-  INFO("*** TensorHalfGemm \n");
-  profileEvent("#Mul");
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-
-  INFO("rhs->dims.num_dims = %d \n", rhs->dims.num_dims);
-  INFO("lhs->dims.num_dims = %d \n", lhs->dims.num_dims);
-
-  hostToDeviceCopy(lhs);
-  hostToDeviceCopy(rhs);
-
-  
-  profileEvent("F2H_start");
-
-  convertToFP16(lhs);
-  convertToFP16(rhs);
-  
-  profileEvent("F2H_end");
-
-
-  // 'm' holds the batch dimension - assuming NCHW format Tensors
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs last dimension must contain the neurons
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Dimension-note: Check if k is same across the two tensors
-  INFO("m = %d, n = %d, k = %d \n", m, n, k);
-  if(rhs_k != k){
-    ERROR("rhs=%d and lhs=%d columns/rows don't match", rhs_k, k);
-  }
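-  // Worked example (illustrative): for an NCHW lhs of dims [32, 64, 4, 4]
-  // and an rhs of dims [1, 1, 1024, 10], m = 32, k = 64 * 4 * 4 = 1024,
-  // n = 10, and rhs_k = 1024 matches k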
-
-  // NOTE: Creating a 4D tensor to be compatible with the cuDNN routines called later
-  Tensor* output = (Tensor*) create4DTensor(half_type, CUDNN_TENSOR_NCHW,
-					    m, n, 1, 1);
-
-  changeTensorPlacement(output, DEVICE);
-
-  //convertToFP16(output);
-
-
-  // INFO: cuBlas uses column-major format
-  // INFO: The leading dimension is just the FIRST Dimension
-  // IMP: output is N * M in column-major format, M*N in row-major - what cuDNN expects
-  const __half alf = approx_float_to_half(1.0);
-  const __half bet = approx_float_to_half(0.0);
-  const __half *alpha_half = &alf;
-  const __half *beta_half = &bet;
-
-
-  checkCudaErrors(cublasGemmEx(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-			       n, m, k,
-			       alpha_half,
-			       (__half*) rhs->gpu_half_data, CUDA_R_16F, n,
-			       (__half*) lhs->gpu_half_data, CUDA_R_16F, k,
-			       beta_half,
-			       (__half*) output->gpu_half_data, CUDA_R_16F, n,
-			       CUDA_R_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP) );
-
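-  // Reasoning sketch for the argument order above: cuBLAS is column-major,
-  // so the row-major m x n output is obtained by computing the column-major
-  // product C^T (n x m) = B^T (n x k) * A^T (k x m); hence rhs is passed
-  // first, with leading dimension n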
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(output);
-
-  //h2f((half*) output_half->gpu_data, output->num_elems, (float*) output->gpu_data);
-
-
-  profileEvent("H2F_end");
-
-
-  profileEvent("#tensorHalfGemm_end");
-
-
-
-  return output;
-}
-
-
-
-void* tensorHalfGemmGPU(void* lhs_ptr, void* rhs_ptr){
-  return tensorHalfGemm(lhs_ptr, rhs_ptr);
-}
-
-
-
-// FIXIT: Generalize all of the routines for types {half, float, double}
-void* tensorHalfConvolution(void* input_ptr, void* filter_ptr,
-			    int vertical_pad, int horizontal_pad,
-			    int vertical_stride, int horizontal_stride,
-			    int conv_mode, int conv_groups){
-
-  INFO("*** TensorHConvolution \n");
-  profileEvent("#Conv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  // NOTE: compute in half precision
-  cudnnDataType_t computeType = CUDNN_DATA_HALF;
-
-  // NOTE: Moving inputs to GPU global memory
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-
-  /***** CONVERSIONS from FP32 to FP16 - on the GPU */
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-  convertToFP16(filter);
-
-  profileEvent("F2H_end");
-  /******* END OF INPUT DATA CONVERSIONS*/
-
-  
-
-  checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-  //FIXME: Current hack to preserve backward compatibility
-  if(conv_groups == 0){
-    conv_groups = 1;
-  }
-  
-  // NOTE: Adding support for grouped convolution
-  checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  
-  // FIXIT: Think if upscaling values need to be configurable?
-  // IMP-FIXIT:  CUDNN Cross correlation is only used in the Lenet context
-  // IMP-FIXIT: Either make mode configurable OR see if CUDNN_CONVOLUTION MODE should be used?
-  checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					     vertical_pad, horizontal_pad, // conv padding
-					     vertical_stride, horizontal_stride, // conv strides
-					     1, 1, // upscaling values
-					     mode, // mode is configurable
-					     computeType)); // defines compute precision
-
-  int n, c, h, w; // output dimensions
-  // Find dimension of convolution output
-  checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   &n, &c, &h, &w));
-  
-  DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-
-  Tensor* output = (Tensor*) create4DTensor((cudnnDataType_t) half_type, // input->data_type,
-					    CUDNN_TENSOR_NCHW, n, c, h, w);
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-
-  //convertToFP16(output);
-
-  
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, H = %d, W = %d, C = %d \n",
-	output->data_type, output->data_format,
-	output->dims.dim_sizes[0], output->dims.dim_sizes[1],
-	output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-  if(convDesc == NULL || input->tensor_half_desc == NULL ||
-     filter->filter_half_desc == NULL || output->tensor_half_desc == NULL)
-    ERROR("NULL descriptor! \n");
-
-
-  // NOTE: The following algo works with TRUE half precision
-  convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-  //convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
-
-  
-  size_t workspace_size;
-  checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						     input->tensor_half_desc,
-						     filter->filter_half_desc,
-						     convDesc,
-						     output->tensor_half_desc,
-						     convAlgo,
-						     &workspace_size));
-
-  // Allocating memory for the convolution workspace
-  DEBUG("workspace size = %d \n", workspace_size);
-  void* workspace;
-  checkCudaErrors(cudaMalloc(&workspace, workspace_size));
-
-
-
-
-  checkCUDNN(cudnnConvolutionForward(cudnnHandle,
-				     &alpha,
-				     input->tensor_half_desc,
-				     input->gpu_half_data,
-				     filter->filter_half_desc,
-				     filter->gpu_half_data,
-				     convDesc, convAlgo,
-				     workspace, workspace_size,
-				     &beta,
-				     output->tensor_half_desc,
-				     output->gpu_half_data));
-
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(output);
-
-  profileEvent("H2F_end");
-
-
-  
-  profileEvent("#tensorHalfConv_end");
-
-
-  return output;
-}
-
-
-
-
-void* tensorHalfBatchNorm(void* input_ptr, void* gamma_ptr, void* beta_ptr,
-           		  void* mean_ptr, void* variance_ptr, double epsilon){
-
-  INFO("*** TensorHalfBatchNorm \n");
-  profileEvent("#BatchNorm");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* gamma = (Tensor*) gamma_ptr;
-  Tensor* beta = (Tensor*) beta_ptr;
-  Tensor* mean = (Tensor*) mean_ptr;
-  Tensor* variance = (Tensor*) variance_ptr;
-  
-  float alpha_val = 1.0f, beta_val = 0.0f;
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(gamma);
-  hostToDeviceCopy(beta);
-  hostToDeviceCopy(mean);
-  hostToDeviceCopy(variance);
-
-  
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-
-  profileEvent("F2H_end");
-  
-
-
-  checkCUDNN(cudnnBatchNormalizationForwardInference(cudnnHandle, CUDNN_BATCHNORM_SPATIAL,
-						     &alpha_val, &beta_val,
-						     input->tensor_half_desc,
-						     input->gpu_half_data,
-						     input->tensor_half_desc,
-						     input->gpu_half_data,
-						     gamma->tensor_desc, gamma->gpu_data,
-						     beta->gpu_data, mean->gpu_data,
-						     variance->gpu_data, epsilon));
-
-
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(input);
-  
-  profileEvent("H2F_end");
-
-
-  
-  profileEvent("#tensorHalfBatchNorm_end", true);
-
-
-  return input;
-}
-
-
-
-
-void* tensorHalfPooling(void* input_ptr,
-			int poolFunction,
-			int window_height, int window_width,
-			int vertical_pad, int horizontal_pad,
-			int vertical_stride, int horizontal_stride){
-
-  
-
-  INFO("*** TensorHalfPooling \n");
-  profileEvent("#Pool");
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  hostToDeviceCopy(input);
-
-  /** floating point to half conversion */
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-
-  profileEvent("F2H_end");
-  //*** end of data conversions
-
-  cudnnPoolingDescriptor_t poolDesc;
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-
-
-  checkCUDNN(cudnnCreatePoolingDescriptor(&poolDesc));
-
-  int n = input->dims.dim_sizes[0];
-  int c = input->dims.dim_sizes[1];
-  int h = (input->dims.dim_sizes[2] + (2 * vertical_pad) - window_height) / vertical_stride;
-  h = h + 1;
-  int w = (input->dims.dim_sizes[3] + (2 * horizontal_pad) - window_width) / horizontal_stride;
-  w = w + 1;
-
-  DEBUG("n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-  // FIXIT: Don't be specific to floats
-  Tensor* output = (Tensor*) create4DTensor(half_type, CUDNN_TENSOR_NCHW, n, c, h, w);
-  // Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE);
-
-  //convertToFP16(output);
-
-  // FIXIT: Fix being specific to CUDNN_DATA_FLOAT and NCHW format
-  // FIXIT: Is this setTensor even needed?
-  checkCUDNN(cudnnSetTensor4dDescriptor(output->tensor_half_desc,
-					CUDNN_TENSOR_NCHW,
-					CUDNN_DATA_HALF,
-					n, c,
-					h, w));
-
-  cudnnPoolingMode_t pool_mode;
-  if(poolFunction == 0)
-    pool_mode = CUDNN_POOLING_MAX;
-  else if(poolFunction == 1)
-    pool_mode = CUDNN_POOLING_AVERAGE_COUNT_EXCLUDE_PADDING;
-
-
-  // FIXIT: Make the pool function (max, min, avg) configurable
-  checkCUDNN(cudnnSetPooling2dDescriptor(poolDesc,
-					 pool_mode,
-					 CUDNN_PROPAGATE_NAN,
-					 window_height, window_width,
-					 vertical_pad, horizontal_pad,
-					 vertical_stride, horizontal_stride));
-
-  
-  checkCUDNN(cudnnPoolingForward(cudnnHandle, poolDesc, &alpha,
-				 input->tensor_half_desc,
-				 input->gpu_half_data, &beta,
-				 output->tensor_half_desc, output->gpu_half_data));
-
-
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(output);
-  
-  profileEvent("H2F_end");
-
-  
-  profileEvent("#tensorHalfPooling_end", true);
-
-  return output;
-}
-
-
-
-
-
-void* tensorHalfRelu2(void* input_ptr, float min, float max){
-
-  INFO("*** TensorClippedRelu \n");
-  profileEvent("#Relu");
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  cudnnActivationDescriptor_t reluDesc;
-  float alpha = 1.0f, beta = 0.0f;
-  hostToDeviceCopy(input);
-
-
-  //**** Floating point to half conversions
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-  
-  profileEvent("F2H_end");
-  /*** End of data type conversion **/
-
-
-  checkCUDNN(cudnnCreateActivationDescriptor(&reluDesc));
-
-  // Use the max argument as the clipping threshold: cuDNN's clipped ReLU
-  // clips activations to [0, coef], so the min argument cannot be honored
-  checkCUDNN(cudnnSetActivationDescriptor(reluDesc, CUDNN_ACTIVATION_CLIPPED_RELU,
-					  CUDNN_PROPAGATE_NAN, max));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, reluDesc, &alpha,
-				    input->tensor_half_desc, input->gpu_half_data, &beta,
-				    input->tensor_half_desc, input->gpu_half_data));
-
-
-  profileEvent("H2F_start");
-  // NOTE: Transforming half precision output to single precision
-
-  convertToFP32_offline(input);
-  
-  profileEvent("H2F_end");
-
-  profileEvent("#tensorHalfClippedRelu_end");
-
-
-  return input;
-}
-
-
-
-
-void* tensorHalfRelu(void* input_ptr){
-
-  INFO("*** TensorHalfRelu \n");
-  profileEvent("#Relu");
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  cudnnActivationDescriptor_t reluDesc;
-  float alpha = 1.0f, beta = 0.0f;
-  hostToDeviceCopy(input);
-
-
-  //**** Floating point to half conversions
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-	    
-  profileEvent("F2H_end");
-  /*** End of data type conversion **/
-
-
-  checkCUDNN(cudnnCreateActivationDescriptor(&reluDesc));
-
-  checkCUDNN(cudnnSetActivationDescriptor(reluDesc, CUDNN_ACTIVATION_RELU,
-					  CUDNN_PROPAGATE_NAN, 0.0));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, reluDesc, &alpha,
-				    input->tensor_half_desc, input->gpu_half_data, &beta,
-				    input->tensor_half_desc, input->gpu_half_data));
-
- 
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(input);
-  
-  profileEvent("H2F_end");
-
-  
-  profileEvent("#tensorHalfRelu_end");
-
-  
-  return input;
-}
-
-
-
-
-
-
-void* tensorHalfTanh(void* input_ptr){
-
-  INFO("*** TensorHalfTanh \n");
-  profileEvent("#Tanh");
-
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  cudnnActivationDescriptor_t tanhDesc;
-  float alpha = 1.0f, beta = 0.0f;
-  hostToDeviceCopy(input);
-
-
-  //**** Data conversion from float to half
-  profileEvent("F2H_start");
-
-  convertToFP16(input);
-  
-  profileEvent("F2H_end");
-  /**** End of data type conversion ****/
-
-
-  checkCUDNN(cudnnCreateActivationDescriptor(&tanhDesc));
-
-  checkCUDNN(cudnnSetActivationDescriptor(tanhDesc, CUDNN_ACTIVATION_TANH,
-					  CUDNN_PROPAGATE_NAN, 0.0));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, tanhDesc, &alpha,
-				    input->tensor_half_desc, input->gpu_half_data, &beta,
-				    input->tensor_half_desc, input->gpu_half_data));
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(input);
-  
-  profileEvent("H2F_end");
-
-  
-  profileEvent("#tensorHalfTanh_end");
-
-
-  return input;
-}
-
-
-
-void* tensorHalfAdd(void* x_ptr, void* bias_ptr){
-
-  Tensor* x = (Tensor*) x_ptr;
-  Tensor* bias = (Tensor*) bias_ptr;
-
-  INFO("*** TensorHalfAdd \n");
-  profileEvent("#Add");
-
-  float alpha = 1.0f;
-  // float beta = 0.0f;
-  hostToDeviceCopy(x);
-  hostToDeviceCopy(bias);
-
-
-  //**** Data conversion from float to half
-  profileEvent("F2H_start");
-
-  convertToFP16(x);
-  convertToFP16(bias);
-  
-  profileEvent("F2H_end");
-  /*** End of data type conversions ****/
-
-
-  // FIXIT: routine fails for 3D tensors
-  checkCUDNN(cudnnAddTensor(cudnnHandle, &alpha, bias->tensor_half_desc,
-			    bias->gpu_half_data, &alpha,
-			    x->tensor_half_desc, x->gpu_half_data));
-
-
-  profileEvent("H2F_start");
-
-  convertToFP32_offline(x);
-  
-  profileEvent("H2F_end");
-
-  
-  profileEvent("#tensorHalfAdd_end");
-
-
-  return x;
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/hpvm-rt-controller.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/hpvm-rt-controller.h
deleted file mode 100644
index af107016a5b8009fef0622487a285ac38a437ad4..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/hpvm-rt-controller.h
+++ /dev/null
@@ -1,1397 +0,0 @@
-
-#ifndef LLVM_HPVM_RT_CONTROLLER_H
-#define LLVM_HPVM_RT_CONTROLLER_H
-
-#include <fstream>
-#include <iostream>
-#include <map>
-#include <vector>
-#include <cstring>
-#include <cstdlib>
-
-#include "configuration.h"
-
-#include "profiler.h"
-#include "promise_timing_model.h"
-#include <ctime>
-
-#include <sys/stat.h>
-
-#define ACTIVE_PROFILING
-
-/*
- * Check if a file exists
- * Return true if the file exists, false otherwise
- */
-bool fileExists(const std::string& file) {
-  struct stat buf;
-  return (stat(file.c_str(), &buf) == 0);
-}
-
-class ProfileInfo {
-  private:
-  // Members
-  double time_total;       // Total execution time of application
-  double time_compute;     // Compute
-  double time_control;     // Control
-  double time_config;      // Apply configuration
-
-  double energy_total;     // Total energy consumed by application
-  double energy_compute;   // Compute
-  double energy_control;   // Control
-  double energy_config;    // Apply configuration
-
-  // Execution time of one loop iteration
-  double time_compute_current_iteration;   // Compute
-  double time_control_current_iteration;   // Control
-  double time_config_current_iteration;    // Apply configuration
-
-  // Energy consumed by one loop iteration
-  double energy_compute_current_iteration; // Compute
-  double energy_control_current_iteration; // Control
-  double energy_config_current_iteration;  // Apply configuration
-
-  // Per-iteration (outer vector), per-operation (inner vector) compute time
-  // and energy information
-  // (an operation is a tensor operation for GPU, or a whole layer for PROMISE)
-  std::vector< std::vector< std::pair< std::string, double > > > tensor_time_info;
-  std::vector< std::vector< std::pair< std::string, double > > > tensor_energy_info;
-
-  // Vectors, where total compute time and energy information per iteration are stored
-  std::vector< double > compute_time_info;
-  std::vector< double > compute_energy_info;
-
-  // Vectors, where control time and energy information per iteration are stored
-  std::vector< double > control_time_info;
-  std::vector< double > control_energy_info;
-
-  // Vectors, where config time and energy information per iteration are stored
-  std::vector< double > config_time_info;
-  std::vector< double > config_energy_info;
-
-  bool in_iteration;
-
-  // Set to the path of the file where results will be written by printToFile.
-  std::string out_file_name;
-
-  // Functions
-  void resetCurrentIterationTime() {
-    time_compute_current_iteration = 0.0;
-    time_control_current_iteration = 0.0;
-    time_config_current_iteration = 0.0;
-  }
-
-  void resetCurrentIterationEnergy() {
-    energy_compute_current_iteration = 0.0;
-    energy_control_current_iteration = 0.0;
-    energy_config_current_iteration = 0.0;
-  }
-
-  void start_iteration() {
-    if (!in_iteration) {
-      resetCurrentIterationTime();
-      resetCurrentIterationEnergy();
-      tensor_time_info.push_back(std::vector< std::pair< std::string, double > > ());
-      tensor_energy_info.push_back(std::vector< std::pair< std::string, double > > ());
-      in_iteration = true;
-    }
-  }
-
-  public:
-  void end_iteration() {
-    // Update time counters
-    time_compute += time_compute_current_iteration;
-    time_control += time_control_current_iteration;
-    time_config  += time_config_current_iteration;
-
-    time_total += (time_compute_current_iteration +
-                   time_control_current_iteration +
-                   time_config_current_iteration);
-
-    // Update energy counters
-    energy_compute += energy_compute_current_iteration;
-    energy_control += energy_control_current_iteration;
-    energy_config  += energy_config_current_iteration;
-
-    energy_total += (energy_compute_current_iteration +
-                     energy_control_current_iteration +
-                     energy_config_current_iteration);
-
-    // Save current iteration counters
-    compute_time_info.push_back(time_compute_current_iteration);
-    compute_energy_info.push_back(energy_compute_current_iteration);
-    control_time_info.push_back(time_control_current_iteration);
-    control_energy_info.push_back(energy_control_current_iteration);
-    config_time_info.push_back(time_config_current_iteration);
-    config_energy_info.push_back(energy_config_current_iteration);
-
-    // Note end of iteration
-    in_iteration = false;
-  }
-
-  void addToCurrentIterationComputeTime(const char *s, double t) {
-    start_iteration();
-    time_compute_current_iteration += t;
-    tensor_time_info.back().push_back(std::make_pair(std::string(s), t));
-  }
-
-  void addToCurrentIterationControlTime(double t) {
-    start_iteration();
-    time_control_current_iteration += t;
-  }
-
-  void addToCurrentIterationConfigTime(double t) {
-    start_iteration();
-    time_config_current_iteration += t;
-  }
-
-  void addToCurrentIterationComputeEnergy(const char *s, double e) {
-    start_iteration();
-    energy_compute_current_iteration += e;
-    tensor_energy_info.back().push_back(std::make_pair(std::string(s), e));
-  }
-
-  void addToCurrentIterationControlEnergy(double e) {
-    start_iteration();
-    energy_control_current_iteration += e;
-  }
-
-  void addToCurrentIterationConfigEnergy(double e) {
-    start_iteration();
-    energy_config_current_iteration += e;
-  }
-
-  double getTotalTime() {
-    return time_total;
-  }
-
-  double getTotalEnergy() {
-    return energy_total;
-  }
-
-  double getCurrentIterationComputeTime() {
-    return time_compute_current_iteration;
-  }
-
-  double getCurrentIterationComputeEnergy() {
-    return energy_compute_current_iteration;
-  }
-
-  void set_out_file_name(std::string &str) {
-    out_file_name = str;
-  }
-
-  void printToFile() {
-
-    INFO("Writing Runtime Profile Info File...\n");
-
-    std::ofstream s_out(out_file_name.c_str());
-
-    if (!s_out) {
-      ERROR("Failed to open output file.");
-      abort();
-    }
-
-    // By construction, tensor_time_info and tensor_energy_info must have
-    // equal sizes in both their outer and inner vectors, and all of the
-    // per-iteration time and energy vectors must have the same length.
-    unsigned iterations = tensor_time_info.size();
-    CUSTOM_ASSERT((tensor_energy_info.size() == iterations) &&
-                  (compute_time_info.size() == iterations) &&
-                  (compute_energy_info.size() == iterations) &&
-                  (control_time_info.size() == iterations) &&
-                  (control_energy_info.size() == iterations) &&
-                  (config_time_info.size() == iterations) &&
-                  (config_energy_info.size() == iterations) &&
-                  "time_info and energy_info sizes: \
-                   iteration counts do not match.");
-
-    for (unsigned i = 0; i < tensor_time_info.size(); i++ ) {
-      // time_info.size() == energy_info.size(), since we passed the assertion
-      s_out << "Iteration " << i << "\n";
-      
-      CUSTOM_ASSERT((tensor_time_info[i].size() == tensor_energy_info[i].size()) &&
-                    "time_info and energy_info size: operation number does not match.");
-      for (unsigned j = 0; j < tensor_time_info[i].size(); j++) {
-        // time_info[i].size() == energy_info[i].size(), we passed the assertion
-        CUSTOM_ASSERT((tensor_time_info[i][j].first == tensor_energy_info[i][j].first) &&
-                      "time_info and energy_info: operation does not match.");
-        s_out << tensor_time_info[i][j].first << " "
-              << tensor_time_info[i][j].second << " "
-              << tensor_energy_info[i][j].second << "\n";
-      }
-
-      s_out << "\nIteration Compute Time  : " << compute_time_info[i] << "\n";
-      s_out << "Iteration Compute Energy: " << compute_energy_info[i] << "\n";
-      s_out << "Iteration Control Time  : " << control_time_info[i] << "\n";
-      s_out << "Iteration Control Energy: " << control_energy_info[i] << "\n";
-      s_out << "Iteration Config Time  : " << config_time_info[i] << "\n";
-      s_out << "Iteration Control Energy: " << config_energy_info[i] << "\n\n\n";
-
-    }
-    s_out << "\n\nTotal Compute Time  : " << time_compute << "\n";
-    s_out << "Total Compute Energy: " << energy_compute << "\n";
-
-    s_out << "\nTotal Control Time  : " << time_control << "\n";
-    s_out << "Total Control Energy: " << energy_control << "\n";
-
-    s_out << "\nTotal Config Time  : " << time_config << "\n";
-    s_out << "Total Config Energy: " << energy_config << "\n";
-
-    s_out << "\nTotal Time  : " << time_total << "\n";
-    s_out << "Total Energy: " << energy_total << "\n";
-
-    s_out.close();
-
-    INFO("Done writing profile.\n");
- 
-  }
-
-  // All accumulators must start at zero: the per-category totals are
-  // accumulated with += in end_iteration()
-  ProfileInfo() : time_total(0.0), time_compute(0.0),
-                  time_control(0.0), time_config(0.0),
-                  energy_total(0.0), energy_compute(0.0),
-                  energy_control(0.0), energy_config(0.0),
-                  time_compute_current_iteration(0.0),
-                  time_control_current_iteration(0.0),
-                  time_config_current_iteration(0.0),
-                  energy_compute_current_iteration(0.0),
-                  energy_control_current_iteration(0.0),
-                  energy_config_current_iteration(0.0),
-                  in_iteration(false) {}
-
-};
-
-class Slowdowns {
-  private:
-  std::vector<float> slowdowns;
-  unsigned idx;
-
-  public:
-    Slowdowns() {
-      idx = 0;
-
-      std::ifstream s_in("slowdowns.txt");
-      if (!s_in) {
-        DEBUG("slowdowns file not found. Initializing slowdowns randomly.\n");
-        for (unsigned i = 0; i < 10; i++) {
-          slowdowns.push_back( 1.0 + (rand()/(RAND_MAX/(5.0-1.0))) );
-        }
-      } else {
-        for (std::string line; std::getline(s_in, line); ) {
-          float s = std::stof(line);
-          slowdowns.push_back(s);
-        }
-      }
-    }
-
-  unsigned getSlowdownsNumber() {
-    return slowdowns.size();
-  }
-
-  float getNextSlowdown() {
-    float tmp = slowdowns[idx];
-    idx = (idx + 1) % slowdowns.size();
-    return tmp;
-  }
-
-};
-
-class RuntimeController;
-
-RuntimeController *RC;
-
-class RuntimeController {
-  private:
-  // Members
-  // Map from node names to quantization ranges
-  std::map<std::string, std::vector<float> > QuantizationMap;
-
-  // Configurations.
-  // Configurations initially read - all generated from autotuner
-  std::vector<struct Configuration> InitialConfigurations;
-
-  // The ones in non dominated set (of pareto optimal points)
-  // for accuracy loss-speedup
-  std::vector<struct Configuration *> SpeedupConfigurations;
-  // The ones in non dominated set (of pareto optimal points)
-  // for accuracy loss-energy
-  std::vector<struct Configuration *> EnergyConfigurations;
-  // The ones in non dominated set (of pareto optimal points)
-  // for accuracy loss-speedup-energy
-  std::vector<struct Configuration *> ThreeDCurveConfigurations;
-
-  std::vector<struct Configuration *> *Configurations;
-  unsigned configurationIdx = 0;
-
-  double baseline_time = 0.0; // Execution time of baseline configuration
-  Slowdowns *slowdowns;
-
-  /*** Objects used to gather timing and energy information for execution ***/
-  ProfileInfo *PI;
-  Profiler *profiler;
-  Promise *promise;
-
-  //Functions
-
-  // Private functions of profiler
-  void start_profiler() {
-    if (profiler)
-      profiler->start_profiler();
-  }
-  void stop_profiler() {
-    if (profiler)
-      profiler->stop_profiler();
-  }
-
-  void setProfileInfoFilename(const char *);
-  void readQuantizationFile(const char *);
-  void readConfigurationFile(const char *);
-
-  void computeParetoConfigurationPoints();
-  void compute3DParetoConfigurationPoints();
-
-  public:
-  // For testing purposes only - do not use widely
-  std::vector<struct Configuration *> &getSpeedupConfigurations() {
-    return SpeedupConfigurations;
-  }
-  // For testing purposes only - do not use widely
-  std::vector<struct Configuration *> &getEnergyConfigurations() {
-    return EnergyConfigurations;
-  }
-  // For testing purposes only - do not use widely
-  std::vector<struct Configuration *> &getThreeDCurveConfigurations() {
-    return ThreeDCurveConfigurations;
-  }
-  // For testing purposes only - do not use widely
-  unsigned getConfigurationIdx() {
-    return configurationIdx;
-  }
-
-  std::vector<float> &getQuantizationRanges(const char *data) {
-    std::string s(data);
-    // All nodes are expected to have quantization ranges
-    return QuantizationMap.at(s);
-  }
-
-  NodeConfiguration *getNodeConfiguration(const char *data) {
-    std::string s(data);
-    // All nodes are expected to have a configuration
-    return (*Configurations)[configurationIdx]->setup.at(s);
-  }
-
-  // Functions for runtime control
-  void findNextConfiguration();
-  void findTargetConfiguration(float, enum SEARCH_KIND);
-  float getGoalSpeedup();
-  double getBaselineTime();
-  Slowdowns *getSlowdowns();
-
-  void init(const char *Cstr, const char *Qstr) {
-    // We initialize the path to the profile info output file,
-    // based on the path given for the configuration file
-    setProfileInfoFilename(Cstr);
-
-    readQuantizationFile(Qstr);
-    readConfigurationFile(Cstr);
-    Configurations = NULL;
-    computeParetoConfigurationPoints();
-//    compute3DParetoConfigurationPoints(); Not using 3D curve
-    INFO("Speedup Configurations\n");
-    printConfigurations(SpeedupConfigurations);
-//    INFO("Energy Configurations\n");
-//    printConfigurations(EnergyConfigurations);
-//    INFO("3D Configurations\n");
-//    printConfigurations(ThreeDCurveConfigurations);
-    configurationIdx = 0; //TODO: initialize using pareto curve - findTargetConfiguration ?
-    Configurations = &SpeedupConfigurations;
-
-    // Initializations for different runtime control strategies
-    srand(static_cast <unsigned> (time(0)));
-    slowdowns = new Slowdowns();
-
-    // Start profiling thread in the background, ready to time
-    start_profiler();
-    pause_profiler();
-    reset_profiler();
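-    // Typical measurement pattern implied by the wrappers below (a sketch):
-    // resume_profiler() before a timed region, pause_profiler() after it,
-    // get_time_energy() to read back a <time, energy> pair, and
-    // reset_profiler() before the next window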
- }
-
-  // Exposing functionality of ProfileInfo
-  void end_iteration() {
-    if (PI)
-      PI->end_iteration();
-  }
-
-  void addToCurrentIterationComputeTime(const char *s, double t) {
-    if (PI)
-      PI->addToCurrentIterationComputeTime(s, t);
-  }
-
-  void addToCurrentIterationControlTime(double t) {
-    if (PI)
-      PI->addToCurrentIterationControlTime(t);
-  }
-
-  void addToCurrentIterationConfigTime(double t) {
-    if (PI)
-      PI->addToCurrentIterationConfigTime(t);
-  }
-
-  void addToCurrentIterationComputeEnergy(const char *s, double e) {
-    if (PI)
-      PI->addToCurrentIterationComputeEnergy(s, e);
-  }
-
-  void addToCurrentIterationControlEnergy(double e) {
-    if (PI)
-      PI->addToCurrentIterationControlEnergy(e);
-  }
-
-  void addToCurrentIterationConfigEnergy(double e) {
-    if (PI)
-      PI->addToCurrentIterationConfigEnergy(e);
-  }
-
-  double getCurrentIterationComputeTime() {
-    return (PI ? PI->getCurrentIterationComputeTime() : 0.0) ;
-  }
-
-  double getCurrentIterationComputeEnergy() {
-    return (PI ? PI->getCurrentIterationComputeEnergy() : 0.0) ;
-  }
-
-  void writeProfileInfo() {
-    if (PI)
-      PI->printToFile();
-  }
-
-  // Exposing functionality of (gpu) profiler
-  void resume_profiler() {
-    if (profiler)
-      profiler->resume_profiler();
-  }
-
-  void pause_profiler() {
-    if (profiler)
-      profiler->pause_profiler();
-  }
-
-  void reset_profiler() {
-    if (profiler)
-      profiler->reset();
-  }
-
-  std::pair<double, double> get_time_energy() const {
-    return (profiler ? profiler->get_time_energy()
-                     : std::make_pair(0.0, 0.0)) ;
-  }
-
-  // Exposing functionality of promise simulator
-  std::pair<double, double> fc_profile(const unsigned num_rows_a,
-                            const unsigned num_cols_a,
-                            const unsigned num_rows_b,
-                            const unsigned num_cols_b,
-                            const unsigned voltage_swing,
-                            const unsigned patch_factor) {
-    return (promise ? promise->fc_profile(num_rows_a,
-                                          num_cols_a,
-                                          num_rows_b,
-                                          num_cols_b,
-                                          voltage_swing,
-                                          patch_factor)
-                    : std::make_pair(0.0, 0.0)) ;
-  }
-
-  std::pair<double, double> conv_profile(const unsigned n,
-                            const unsigned c,
-                            const unsigned h,
-                            const unsigned w,
-                            const unsigned c_out,
-                            const unsigned c_in,
-                            const unsigned k_h,
-                            const unsigned k_w,
-                            const unsigned s_h,
-                            const unsigned s_w,
-                            const unsigned voltage_swing,
-                            const unsigned patch_factor) {
-    return (promise ? promise->conv_profile(n, c, h, w,
-                                            c_out, c_in, k_h, k_w,
-                                            s_h, s_w,
-                                            voltage_swing, patch_factor)
-                    : std::make_pair(0.0, 0.0)) ;
-  }
-
-  // Constructor and destructor
-  RuntimeController() {
-    configurationIdx = 0;
-#ifdef ACTIVE_PROFILING
-    PI = new ProfileInfo();
-    profiler = new Profiler();
-    promise = new Promise();
-#else
-    PI = NULL;
-    profiler = NULL;
-    promise = NULL;
-#endif
-
-  }
-
-  ~RuntimeController() {
-
-    stop_profiler();
-    writeProfileInfo();
-
-    if (PI) {
-      delete PI;
-    }
-    if (profiler) {
-      delete profiler;
-    }
-    if (promise) {
-      delete promise;
-    }
-
-    for (std::vector<struct Configuration>::iterator it = InitialConfigurations.begin(),
-           ie = InitialConfigurations.end(); it != ie; ++it) {
-      // Take the setup map by reference (no copy) and use a distinct inner
-      // iterator name to avoid shadowing the outer one
-      std::map<std::string, NodeConfiguration *> &ConfSetup = it->setup;
-      for (std::map<std::string, NodeConfiguration *>::const_iterator sit = ConfSetup.begin();
-           sit != ConfSetup.end(); ++sit) {
-        delete sit->second;
-      }
-    }
-  // Memory for all configurations is freed here, at once: the configurations
-  // are stored in several containers but share their node setups, so each
-  // setup is deleted exactly once, through InitialConfigurations.
-  }
-
-  // Helper Functions
-  void printQuantizationMap();
-  void printConfigurations(std::vector<struct Configuration> &);
-  void printConfigurations(std::vector<struct Configuration *> &);
-
-};
-
-void RuntimeController::setProfileInfoFilename(const char *str) {
-
-  if (PI) {
-    std::string file_path = std::string(str);
-    size_t idx = file_path.find_last_of("/");
-    file_path.erase(idx + 1);
-    file_path.append("profile_info_");
-
-    bool found = false;
-    std::string profile_filename;
-    for (unsigned i = 0; !found; i++) {
-      profile_filename = file_path;
-      profile_filename.append(std::to_string(i));
-      profile_filename.append(".txt");
-      found = !fileExists(profile_filename);
-    }
-
-    PI->set_out_file_name(profile_filename);
-  }
-}
-
-#define NODE_NAME_BUFFER_SIZE 10
-  
-void RuntimeController::readQuantizationFile(const char *str) {
-
-  INFO("Reading Quantization Ranges File...\n");
- 
-  if (std::string(str).empty()) {
-    INFO("Empty quantization file string.\n");
-    return;
-  } 
-
-  std::ifstream qin(str);
-
-  if (!qin) {
-    ERROR("Failed to open PROMISE quantization file.");
-    abort();
-  }
-
-  char NodeName[NODE_NAME_BUFFER_SIZE];
-  // Read-then-test (rather than checking qin.eof()) so that a trailing
-  // newline does not insert a spurious last entry
-  while (qin >> NodeName) {
-    std::vector<float> QuantRangeVector;
-
-    float qrange;
-    for (unsigned i = 0; i < 8; i++ ) {
-      qin >> qrange;
-      QuantRangeVector.push_back(qrange);
-    }
-    // See if we need to insert this in map instead - my lookup test seemed to work without it
-    // std::string s(NodeName);
-    QuantizationMap.insert(std::pair< std::string, std::vector<float> > (NodeName,
-									 QuantRangeVector));
-  }
-
-  qin.close();
-  INFO("DONE.\n");
-}
-
-void RuntimeController::printQuantizationMap() {
-
-  DEBUG("Quantization Ranges Map:\n");
-
-  for  (std::map<std::string, std::vector<float> >::const_iterator it = QuantizationMap.begin();
-	it != QuantizationMap.end(); ++it) {
-    DEBUG("%s :", it->first.c_str());
-
-    for (unsigned i = 0; i < it->second.size() ; i++) {
-      DEBUG(" %f", it->second[i]);
-    }
-
-    DEBUG("\n");
-  }
-
-}
-
-
-void RuntimeController::readConfigurationFile(const char *str) {
-
-  INFO("Reading Configuration File...\n");
-
-  std::ifstream qin(str);
-
-  if (!qin) {
-    ERROR("Failed to open configuration file.");
-    abort();
-  }
-
-  bool readingConfiguration = false;
-  bool readingFirstLine = false;
-
-  // Read baseline_time from first line of configuration file
-  std::string first_line;
-  std::getline(qin, first_line);
-  DEBUG("first_line: %s\n", first_line.c_str());
-  baseline_time = std::stod(first_line);
-  DEBUG("Baseline time: %lf\n\n", baseline_time);
-
-  for (std::string line; std::getline(qin, line); ) {
-    DEBUG("line: %s\n", line.c_str());
-
-    // Tokenize using ' ' as delimiter
-    // Vector to store tokens
-    std::vector<std::string> tokens;
-
-    for (auto i = strtok(&line[0], " "); i != NULL; i = strtok(NULL, " "))
-      tokens.push_back(i);
-
-    for (unsigned i = 0; i < tokens.size(); i++ )
-      DEBUG("t: %s\n", tokens[i].c_str());
-
-    DEBUG("\n");
-
-    if (tokens[0] == "+++++") { // Found new configuration start token
-      // Mark the start of a new configuration
-      readingConfiguration = true;
-      readingFirstLine = true;
-      continue;
-    }
-
-    if (tokens[0] == "-----") { // Found configuration end token
-      readingConfiguration = false;
-      // Mark the end of current configuration
-      continue;
-    }
-
-    if (readingFirstLine) {
-      // Read first line, to create the new configuration struct
-      readingFirstLine = false;
-      InitialConfigurations.push_back(Configuration(tokens[0],
-                                      std::stof(tokens[1]),
-                                      std::stof(tokens[2]),
-                                      std::stof(tokens[3]),
-                                      std::stof(tokens[4])));
-      continue;
-    }
-
-    if (tokens[1] == "promise") {
-      DEBUG("Found promise configuration\n");
-
-      // There must be at least one approximation option
-      CUSTOM_ASSERT((tokens.size() >= 2) && "Not enough approximation options.");
-
-      PROMISENodeConfiguration *NodeConf = new PROMISENodeConfiguration();
-      InitialConfigurations.back().setup.insert(std::make_pair(tokens[0], NodeConf));
-
-      // In increments of two, to handle pairs of approx option - tunable parameter
-      for (unsigned idx = 2; idx < tokens.size(); idx += 2) {
-	if (tokens[idx] == "swing_level") {
-	  DEBUG("Found swing voltage option\n");
-	  int vswing = std::stoi(tokens[idx+1]);
-	  DEBUG("vswing: %d\n", vswing);
-	  NodeConf->pushNewApproximationChoice(PROMISENodeConfiguration::APPROX::SWING_LEVEL,
-					       vswing);
-	}
-	// TODO: other approximation options handled here
-      }
-
-    } else if (tokens[1] == "gpu") {
-      DEBUG("Found gpu configuration\n");
-
-      // There must be at least one operation, with an approximation option
-      CUSTOM_ASSERT((tokens.size() >= 5) && "Not enough operations - approximation options.");
-
-      GPUNodeConfiguration *NodeConf = new GPUNodeConfiguration();
-      InitialConfigurations.back().setup.insert(std::make_pair(tokens[0], NodeConf));
-
-      unsigned idx = 2;
-      while (idx < tokens.size()) {
-	if (tokens[idx] == "add") {
-	  DEBUG("Found add operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::ADD);
-	  idx++;
-	} else if (tokens[idx] == "batchnorm") {
-	  DEBUG("Found batchnorm operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::BATCHNORM);
-	  idx++;
-	} else if (tokens[idx] == "conv") {
-	  DEBUG("Found conv operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::CONV);
-	  idx++;
-	} else if (tokens[idx] == "group_conv") {
-	  DEBUG("Found group_conv operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::GROUP_CONV);
-	  idx++;
-	} else if (tokens[idx] == "mul") {
-	  DEBUG("Found mul operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::MUL);
-	  idx++;
-	} else if (tokens[idx] == "relu") {
-	  DEBUG("Found relu operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::RELU);
-	  idx++;
-	} else if (tokens[idx] == "clipped_relu") {
-	  DEBUG("Found clipped_relu operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::CLIPPED_RELU);
-	  idx++;
-	} else if (tokens[idx] == "tanh") {
-	  DEBUG("Found tanh operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::TANH);
-	  idx++;
-	} else if (tokens[idx] == "pool_max") {
-	  DEBUG("Found pool_max operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::POOL_MAX);
-	  idx++;
-	} else if (tokens[idx] == "pool_mean") {
-	  DEBUG("Found pool_mean operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::POOL_MEAN);
-	  idx++;
-	} else if (tokens[idx] == "pool_min") {
-	  DEBUG("Found pool_min operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::POOL_MIN);
-	  idx++;
-	} else if (tokens[idx] == "softmax") {
-	  DEBUG ("Found softmax operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::SOFTMAX);
-	  idx++;
-	} else if (tokens[idx] == "fft") {
-	  DEBUG ("Found fft operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::FFT);
-	  idx++;
-	} else if (tokens[idx] == "reduce") {
-	  DEBUG ("Found reduce operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::REDUCE);
-	  idx++;
-	} else if (tokens[idx] == "projectiveT") {
-	  DEBUG ("Found projectiveT operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::PROJECTIVE_T);
-	  idx++;
-	} else if (tokens[idx] == "map1") {
-	  DEBUG ("Found map1 operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::MAP1);
-	  idx++;
-	} else if (tokens[idx] == "map2") {
-	  DEBUG ("Found map2 operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::MAP2);
-	  idx++;
-	} else if (tokens[idx] == "map3") {
-	  DEBUG ("Found map3 operation\n");
-	  NodeConf->pushNewTensorOperation(GPUNodeConfiguration::TENSOR_OP::MAP3);
-	  idx++;
-	} else /*Not a new operation. This means an approximation option*/
-	  if (tokens[idx] == "fp32") {
-	    DEBUG("Found fp32 option\n");
-	    int fp32 = std::stoi(tokens[idx+1]);
-	    DEBUG("fp32 parameter: %d, ignoring\n", fp32);
-	    NodeConf->pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::FP32,
-							     fp32);
-	    idx += 2;
-	  } else if (tokens[idx] == "fp16") {
-	    DEBUG("Found fp16 option\n");
-	    int fp16 = std::stoi(tokens[idx+1]);
-	    DEBUG("fp16 parameter: %d, ignoring\n", fp16);
-	    NodeConf->pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::FP16,
-							     fp16);
-	    idx += 2;
-	  } else if (tokens[idx] == "perf") {
-	    DEBUG("Found perf option\n");
-        int perf = std::stoi(tokens[idx+1]);
-	    DEBUG("perf parameter: %d\n", perf);
-        NodeConf->pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::PERFORATION, perf);
-          idx += 2;
-        } else if (tokens[idx] == "samp") {
-	    DEBUG("Found samp option\n");
-        int samp = std::stoi(tokens[idx+1]);
-	    DEBUG("samp parameter: %d\n", samp);
-        NodeConf->pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::INPUT_SAMPLING, samp);
-          idx += 2;
-        } else if (tokens[idx] == "red_samp") {
-	    DEBUG("Found red_samp option\n");
-        int red_samp = std::stoi(tokens[idx+1]);
-	    DEBUG("red_samp parameter: %d\n", red_samp);
-        NodeConf->pushNewApproximationChoiceForOperation(GPUNodeConfiguration::APPROX::REDUCTION_SAMPLING, red_samp);
-          idx += 2;
-        }
-	// TODO: other approximation options handled here
-
-      }
-
-    } else {
-      DEBUG ("Invalid Configuration File\n");
-      exit(1);
-    }
-
-  }
-
-  qin.close();
-  DEBUG("DONE.\n");
-
-}
-
-#define AL_THRESHOLD 0.1
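-// Example (illustrative): with AL_THRESHOLD = 0.1, sorted accuracy losses
-// {0.00, 0.05, 0.15} split into the groups {0.00, 0.05} and {0.15}; one best
-// speedup and one best energy representative is kept per group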
-void RuntimeController::computeParetoConfigurationPoints() {
-
-  // Keep indices of pareto optimal points (configurations from
-  // InitialConfigurations vector that were copied to Configurations vector.)
-  // The others' setup pointer needs to be deleted
-  std::vector<unsigned> Indices;
-
-  // Sort the configurations according to accuracy loss
-  INFO("Sorting autotuner configurations...\n");
-  std::sort(InitialConfigurations.begin(),
-            InitialConfigurations.end(),
-            ConfigurationLessThan());
-  INFO("Done sorting.\n");
-
-  for (unsigned start_idx = 0; start_idx < InitialConfigurations.size(); ) {
-    // Points to the first Configuration with different (higher) accuracy loss
-    // than the one pointed to by start_idx
-    unsigned end_idx = start_idx + 1;
-    while ((end_idx < InitialConfigurations.size()) &&
-           (InitialConfigurations[end_idx].accuracyLoss -
-            InitialConfigurations[start_idx].accuracyLoss < AL_THRESHOLD)) {
-      end_idx++;
-    }
-    DEBUG("start_idx = %d, end_idx = %d\n", start_idx, end_idx);
-    // Now, all elements in [start_idx, end_idx) have (near-)equal accuracy
-    // loss, which is lower than that of later ones.
-
-    // Find the best speedup and energy between them as well
-    float sp = -1.0; //FLT_MIN
-    unsigned sp_idx = 0;
-
-    float en = -1.0; //FLT_MIN
-    unsigned en_idx = 0;
-
-    for (unsigned i = start_idx; i < end_idx; i++) {
-      if (InitialConfigurations[i].speedup > sp) {
-        sp = InitialConfigurations[i].speedup;
-        sp_idx = i;
-      }
-      if (InitialConfigurations[i].energy > en) {
-        en = InitialConfigurations[i].energy;
-        en_idx = i;
-      }
-    }
-    DEBUG("accuracy loss = %f, speedup = %f, at sp_idx = %d\n",
-          InitialConfigurations[sp_idx].accuracyLoss, sp, sp_idx);
-    // Found best speedup for this accuracy point (not dominated by any of these).
-    DEBUG("accuracy loss = %f, energy = %f, at en_idx = %d\n",
-          InitialConfigurations[en_idx].accuracyLoss, en, en_idx);
-    // Found best energy for this accuracy point (not dominated by any of these).
-
-    // Now check that the chosen point is not dominated by configurations
-    // already inserted in the pareto frontier. Those all have better (lower)
-    // accuracy loss, so this one is only added if it shows a strictly better
-    // speedup. The frontier point with the best speedup so far is the last
-    // one inserted (the one with the worst, i.e. highest, accuracy loss), so
-    // it suffices to compare against that one alone.
-
-    // The energy frontier is handled analogously.
-
-    bool sp_notDominated = true;
-    if (!SpeedupConfigurations.empty()) {
-      if (SpeedupConfigurations.back()->speedup >= sp)
-        sp_notDominated = false;
-    }
-
-    bool en_notDominated = true;
-    if (!EnergyConfigurations.empty()) {
-      if (EnergyConfigurations.back()->energy >= en)
-        en_notDominated = false;
-    }
-
-    DEBUG("sp_notDominated = %d\n", sp_notDominated);
-    DEBUG("en_notDominated = %d\n", en_notDominated);
-
-    // If not dominated, insert in pareto frontier set
-    if (sp_notDominated) {
-      SpeedupConfigurations.push_back(&InitialConfigurations[sp_idx]);
-    }
-    if (en_notDominated) {
-      EnergyConfigurations.push_back(&InitialConfigurations[en_idx]);
-    }
-
-    // Keep track of unnecessary configurations
-    for (unsigned i = start_idx; i < end_idx; i++) {
-      if (((i != sp_idx) || (!sp_notDominated)) &&
-          ((i != en_idx) || (!en_notDominated)))
-        Indices.push_back(i);
-    }
-
-    // Continue from next accuracy loss level
-    start_idx = end_idx;
-
-  }
-
-  // All elements in InitialConfigurations whose index is in Indices are no
-  // longer needed.
-//  for (std::vector<unsigned>::iterator idx_it = Indices.begin(), idx_e = Indices.end();
-//       idx_it != idx_e; ++idx_it) {
-//    std::map<std::string, NodeConfiguration * > ConfSetup =
-//      InitialConfigurations[*idx_it].setup;
-//    for (std::map<std::string, NodeConfiguration* >::const_iterator it = ConfSetup.begin();
-//     it != ConfSetup.end(); ++it) {
-//      delete it->second;
-//    }
-//  }
-//  InitialConfigurations.clear();
-
-}
-
-void RuntimeController::compute3DParetoConfigurationPoints() {
-
-  // Sort the configurations according to accuracy loss
-  INFO("Sorting autotuner configurations...\n");
-  std::sort(InitialConfigurations.begin(),
-            InitialConfigurations.end(),
-            ConfigurationLessThan());
-  INFO("Done sorting.\n");
-
-  for (unsigned start_idx = 0; start_idx < InitialConfigurations.size(); ) {
-    // Points to the first Configuration with different (higher) accuracy loss
-    // than the one pointed to by start_idx
-    unsigned end_idx = start_idx + 1;
-    while ((end_idx < InitialConfigurations.size()) &&
-           (InitialConfigurations[end_idx].accuracyLoss -
-            InitialConfigurations[start_idx].accuracyLoss < AL_THRESHOLD)) {
-      end_idx++;
-    }
-    DEBUG("start_idx = %d, end_idx = %d\n", start_idx, end_idx);
-    // Now, all elements in [start_idx, end_idx) have (near-)equal accuracy
-    // loss, which is lower than that of later ones and worse than that of
-    // points already in the curve (so they cannot displace them).
-
-    // Find candidates from [start_idx, end_idx) to be inserted
-    // Keep their indices. If a point is dominated (strictly worse),
-    // its index will not be inserted
-    std::vector<unsigned> Indices;
-
-    for (unsigned i = start_idx; i < end_idx; i++) {
-      bool dominated = false;
-      for (unsigned j = i+1; (j < end_idx) && !dominated; j++) {
-        if ((InitialConfigurations[i].speedup < InitialConfigurations[j].speedup) &&
-            (InitialConfigurations[i].energy < InitialConfigurations[j].energy)) {
-          dominated = true;
-        }
-      }
-      if (!dominated) {
-        DEBUG("accuracy loss = %f, speedup = %f, energy = %f, at idx = %d\n",
-              InitialConfigurations[i].accuracyLoss,
-              InitialConfigurations[i].speedup,
-              InitialConfigurations[i].energy,
-              i);
-        Indices.push_back(i);
-      }
-    }
-
-    for (std::vector<unsigned>::iterator idx_it = Indices.begin(), idx_e = Indices.end();
-         idx_it != idx_e; ++idx_it) {
-      Configuration &CandidateConfiguration = InitialConfigurations[*idx_it];
-
-      if (!ThreeDCurveConfigurations.empty()) {
-        bool notDominated = true;
-        for (unsigned i = 0; (i < ThreeDCurveConfigurations.size()) && notDominated; i++) {
-          if ((CandidateConfiguration.speedup <= ThreeDCurveConfigurations[i]->speedup) &&
-              (CandidateConfiguration.energy <= ThreeDCurveConfigurations[i]->energy)) {
-            // This configuration is not better, in at least one characteristic,
-            // compared to the existing ones in the curve.
-            notDominated = false;
-          }
-        }
-        if (notDominated) {
-          ThreeDCurveConfigurations.push_back(&CandidateConfiguration);
-        }
-      } else {
-        // If the curve is empty, we know that this is a point that must be
-        // inserted. It has the best accuracy loss, and belongs here because
-        // it is not dominated by any point in this accuracy range.
-        ThreeDCurveConfigurations.push_back(&CandidateConfiguration);
-      }
-    }
-
-    // Continue from next accuracy loss level
-    start_idx = end_idx;
-  }
-
-}
-
-
-void RuntimeController::printConfigurations(std::vector<struct Configuration> &Confs) {
-
-  for (std::vector<struct Configuration>::iterator it = Confs.begin(),
-       ie = Confs.end(); it != ie; ++it) {
-    it->print();
-  }
-
-}
-
-void RuntimeController::printConfigurations(std::vector<struct Configuration *> &Confs) {
-
-  for (std::vector<struct Configuration *>::iterator it = Confs.begin(),
-       ie = Confs.end(); it != ie; ++it) {
-    (*it)->print();
-  }
-
-}
-
-void RuntimeController::findNextConfiguration() {
-  configurationIdx = (configurationIdx + 1) % Configurations->size() ;
-  DEBUG("findNextConfiguration: Updated configurationIdx to %u.\n", configurationIdx);
-}
-
-void RuntimeController::findTargetConfiguration(float goal, enum SEARCH_KIND sk) {
-  // We search in range begin(), end()-1 . It is OK to decrement end(), because
-  // the configurations vector always points to one of the pareto curves, and
-  // they are never empty - we have always pushed at least one configuration.
-
-  DEBUG("findTargetConfiguration: goalVal: %f, search kind: %d.\n", goal, sk);
-  std::vector<struct Configuration *>::iterator low_it;
-  switch (sk) {
-    case SPEEDUP:
-      {
-        Configurations = &SpeedupConfigurations;
-        low_it = std::lower_bound(Configurations->begin(),
-                                  Configurations->end()-1,
-                                  goal,
-                                  ConfigurationLessThan_SP());
-        configurationIdx = low_it - Configurations->begin();
-        break;
-      }
-    case ENERGY:
-      {
-        Configurations = &EnergyConfigurations;
-        low_it = std::lower_bound(Configurations->begin(),
-                                  Configurations->end()-1,
-                                  goal,
-                                  ConfigurationLessThan_E());
-        configurationIdx = low_it - Configurations->begin();
-        break;
-      }
-    case ACCURACY_LOSS:
-      {
-        Configurations = &SpeedupConfigurations;
-        low_it = std::lower_bound(Configurations->begin(),
-                                  Configurations->end()-1,
-                                  goal,
-                                  ConfigurationLessThan_AL());
-        // Guard against stepping before begin() when even the first
-        // configuration exceeds the accuracy loss goal
-        if ((low_it != Configurations->begin()) && ((*low_it)->accuracyLoss > goal))
-          --low_it;
-        configurationIdx = low_it - Configurations->begin();
-        break;
-      }
-    default:
-      {
-        CUSTOM_ASSERT(false && "Unknown search option for optimization target");
-        ERROR("Unknown search option for optimization target.");
-        abort();
-      }
-  }
-  // After the search, low_it points to the configuration with the goal value,
-  // or to the one with the closest lower value if an exact match does not exist.
-
-  DEBUG("findTargetConfiguration: Updated configurationIdx to %u.\n", configurationIdx);
-
-}
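-
-// Usage sketch for the search above (goal values are hypothetical; each curve
-// is assumed to be kept sorted by the searched field, which is what makes
-// std::lower_bound applicable):
-//   RC->findTargetConfiguration(2.5, SPEEDUP);       // first config with speedup >= 2.5
-//   RC->findTargetConfiguration(0.8, ENERGY);        // first config with energy >= 0.8
-//   RC->findTargetConfiguration(1.0, ACCURACY_LOSS); // config with loss <= 1.0 when one exists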
-
-#define MAX_GOAL_SPEEDUP 9
-float RuntimeController::getGoalSpeedup() {
-  return 1.0 + (rand() / (RAND_MAX / (MAX_GOAL_SPEEDUP - 1.0) ) );
-}
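-
-// Sketch of the distribution above, assuming MAX_GOAL_SPEEDUP stays at 9:
-// rand() / (RAND_MAX / 8.0) is roughly uniform over [0, 8], so the returned
-// goal lies in [1.0, 9.0]; e.g. rand() == RAND_MAX / 2 yields about 5.0.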
-
-double RuntimeController::getBaselineTime() {
-  return baseline_time;
-}
-
-Slowdowns *RuntimeController::getSlowdowns() {
-  return slowdowns;
-}
-
-// Functions to be inserted along with initializeTensorRT and clearTensorRT
-void llvm_hpvm_initializeRuntimeController(const char *ConfigFile, const char *QRangeFile) {
-  RC = new RuntimeController();
-  RC->init(ConfigFile, QRangeFile);
-  return;
-}
-
-void llvm_hpvm_clearRuntimeController() {
-  delete RC;
-  return;
-}
-
-//*** Methods to compute accuracy of a tensor by the runtime controller   ***//
-uint32_t* labels_from_file = NULL;
-
-uint32_t* hpvm_rt_readLabelsBatch_cached(const char* labels_file, int start, int end) {
-
-  // Initialize buffer
-  if (!labels_from_file) {
-    FILE* file = fopen(labels_file, "rb");
-    if (file == NULL) {
-      ERROR("Data file %s is not found. Aborting...\n", labels_file);
-      abort();
-    }
-    // Get number of labels
-    fseek(file, 0, SEEK_END);
-    long size = ftell(file);
-    fseek(file, 0, SEEK_SET); // return file pointer to beginning
-
-    // Allocate memory for labels
-    labels_from_file = (uint32_t*) malloc(size);
-    if (labels_from_file == NULL) {
-      ERROR("Memory allocation for labels unsuccessful. Aborting...\n");
-      abort();
-    }
-
-    // Copy the labels file into the allocated buffer
-    size_t result = fread(labels_from_file, 1, size, file);
-    if (result != (size_t) size) {
-      // We did not read as many elements as there are in the file
-      ERROR("Reading labels file unsuccessful. Aborting...\n");
-      abort();
-    }
-
-    fclose(file);
-  }
-
-//  int num_labels = end - start; 
-//  uint32_t* labels = (uint32_t*) malloc(sizeof(uint32_t) * num_labels);
-//  for (unsigned i = start; i < end; i++) {
-//    labels[i-start] = labels_from_file[i];
-//  }
-//  return labels;
-
-  // Return pointer to labels
-  return &labels_from_file[start];
-
-}
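-
-// Illustrative call (file name and batch bounds are hypothetical): labels for
-// inputs [100, 200) are read once from disk and then served from the cache:
-//   uint32_t* labels = hpvm_rt_readLabelsBatch_cached("test_labels.bin", 100, 200);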
-
-//*** Copied from dnn_sources/include/utils.h                             ***//
-float hpvm_rt_computeAccuracy3(uint32_t* labels, void* result_ptr) {
-  
-  struct Tensor* result = (struct Tensor*) result_ptr;
-  
-  size_t batch_dim = result->dims.dim_sizes[0];
-  size_t num_classes = result->dims.dim_sizes[1];
-  float* data = (float*) result->host_data;
-  int num_errors = 0;
-
-  printf("batch_dim = %lu, num_classes = %lu \n", batch_dim, num_classes);
-  
-  for(int i = 0; i < batch_dim; i++){
-  
-    int chosen = 0;
-    for (int id = 1; id < num_classes; ++id){
-      if (data[i * num_classes + chosen] < data[i * num_classes + id]) chosen = id;
-    }
-    
-    if(chosen != labels[i])
-      num_errors++;
-  }
-
-  float accuracy = ((batch_dim - num_errors) * 100.0) / batch_dim;
-  printf("****** Accuracy = %f \n\n", accuracy);
-
-  FILE* fp = fopen("final_accuracy", "w+");
-  if(fp != NULL){
-
-    std::ostringstream ss;
-    ss << std::fixed << accuracy;
-    std::string print_str = ss.str();
-
-    fwrite(print_str.c_str(), 1, print_str.length(), fp);
-    fclose(fp); // Only close the handle when fopen succeeded
-  }
-
-  return accuracy;    
-}
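-
-// Worked example for the top-1 accuracy above (hypothetical numbers): with
-// batch_dim = 500 and num_errors = 60, accuracy = (500 - 60) * 100.0 / 500 = 88.0.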
-
-#define llvm_hpvm_invokeRtControl_BASE llvm_hpvm_invokeRtControl
-
-void llvm_hpvm_invokeRtControl_BASE(void* result, const char* str, int start, int end) {
-
-  RC->resume_profiler();
-
-  uint32_t* labels_cached = hpvm_rt_readLabelsBatch_cached(str, start, end);
-  hpvm_rt_computeAccuracy3(labels_cached, result);
-
-  // Read stats for iteration that was just completed
-  double current_iteration_time = RC->getCurrentIterationComputeTime();
-  double current_iteration_energy = RC->getCurrentIterationComputeEnergy();
-
-  RC->pause_profiler();
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationControlTime(pinfo.first);
-  RC->addToCurrentIterationControlEnergy(pinfo.second);
-
-  INFO("current iteration time = %f, current iteration energy = %f\n\n",
-       current_iteration_time, current_iteration_energy);
-
-  // Note the end of iteration
-  RC->end_iteration();
-}
-
-void llvm_hpvm_invokeRtControl_ITERATE(void* result, const char* str, int start, int end) {
-
-  uint32_t* labels_cached = hpvm_rt_readLabelsBatch_cached(str, start, end);
-  hpvm_rt_computeAccuracy3(labels_cached, result);
-
-  // Read stats for iteration that was just completed
-  double current_iteration_time = RC->getCurrentIterationComputeTime();
-  double current_iteration_energy = RC->getCurrentIterationComputeEnergy();
-
-  RC->resume_profiler();
-  RC->findNextConfiguration();
-  // findNextConfiguration is still used here so that configurationIdx
-  // advances to the next configuration before the target search below
-  enum SEARCH_KIND k = ACCURACY_LOSS;
-  float goalVal = RC->getSpeedupConfigurations()[RC->getConfigurationIdx()]->accuracyLoss;
-  RC->findTargetConfiguration(goalVal, k);
-
-  RC->pause_profiler();
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationControlTime(pinfo.first);
-  RC->addToCurrentIterationControlEnergy(pinfo.second);
-
-  INFO("current iteration time = %f, current iteration energy = %f\n\n",
-       current_iteration_time, current_iteration_energy);
-
-  // Note the end of iteration
-  RC->end_iteration();
-}
-
-void llvm_hpvm_invokeRtControl_ADJUST(void* result, const char* str, int start, int end) {
-
-  uint32_t* labels_cached = hpvm_rt_readLabelsBatch_cached(str, start, end);
-  hpvm_rt_computeAccuracy3(labels_cached, result);
-
-  // Read stats for iteration that was just completed
-  double current_iteration_energy = RC->getCurrentIterationComputeEnergy();
-
-  RC->resume_profiler();
-  double current_iteration_time = RC->getCurrentIterationComputeTime();
-  double baseline_time = RC->getBaselineTime();
-  double target_speedup = current_iteration_time / baseline_time;
-  RC->findTargetConfiguration(target_speedup, SPEEDUP);
-  RC->pause_profiler();
-
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationControlTime(pinfo.first);
-  RC->addToCurrentIterationControlEnergy(pinfo.second);
-
-  INFO("current iteration time = %f, current iteration energy = %f\n",
-       current_iteration_time, current_iteration_energy);
-  INFO("target speedup = %lf\n\n", target_speedup);
-
-  // Note the end of iteration
-  RC->end_iteration();
-}
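-
-// Worked example for the ADJUST policy above (hypothetical numbers): if the
-// baseline iteration takes 10 ms and the completed iteration took 25 ms, then
-// target_speedup = 25 / 10 = 2.5, and the controller swaps to the first
-// configuration on the speedup curve offering a speedup of at least 2.5x.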
-
-void llvm_hpvm_invokeRtControl_SLOWDOWN(void* result, const char* str, int start, int end) {
-
-  uint32_t* labels_cached = hpvm_rt_readLabelsBatch_cached(str, start, end);
-  hpvm_rt_computeAccuracy3(labels_cached, result);
-
-  // Read stats for iteration that was just completed
-  double current_iteration_time = RC->getCurrentIterationComputeTime();
-  double current_iteration_energy = RC->getCurrentIterationComputeEnergy();
-
-  std::string prev_conf_name =
-    RC->getSpeedupConfigurations()[RC->getConfigurationIdx()]->name;
-
-  RC->resume_profiler();
-  float slowdown = RC->getSlowdowns()->getNextSlowdown();
-  RC->findTargetConfiguration(slowdown, SPEEDUP);
-  RC->pause_profiler();
-
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationControlTime(pinfo.first);
-  RC->addToCurrentIterationControlEnergy(pinfo.second);
-
-  std::string next_conf_name =
-    RC->getSpeedupConfigurations()[RC->getConfigurationIdx()]->name;
-  float next_conf_speedup =
-    RC->getSpeedupConfigurations()[RC->getConfigurationIdx()]->speedup;
-
-  INFO("current iteration time = %f, current iteration energy = %f\n",
-       current_iteration_time, current_iteration_energy);
-  INFO("slowdown (target speedup) = %f\n", slowdown);
-  INFO("Previous configuration: %s\n", prev_conf_name.c_str());
-  INFO("Swapping to next configuration: %s with speedup %f\n\n",
-       next_conf_name.c_str(), next_conf_speedup);
-
-  // Note the end of iteration
-  RC->end_iteration();
-}
-
-void llvm_hpvm_invokeRtControl_RAND(void* result, const char* str, int start, int end) {
-
-  uint32_t* labels_cached = hpvm_rt_readLabelsBatch_cached(str, start, end);
-  hpvm_rt_computeAccuracy3(labels_cached, result);
-
-  // Read stats for iteration that was just completed
-  double current_iteration_time = RC->getCurrentIterationComputeTime();
-  double current_iteration_energy = RC->getCurrentIterationComputeEnergy();
-
-  RC->resume_profiler();
-  RC->findTargetConfiguration(RC->getGoalSpeedup(), SPEEDUP);
-  RC->pause_profiler();
-
-  std::pair<double, double> pinfo = RC->get_time_energy();
-  RC->reset_profiler();
-  RC->addToCurrentIterationControlTime(pinfo.first);
-  RC->addToCurrentIterationControlEnergy(pinfo.second);
-
-  INFO("current iteration time = %f, current iteration energy = %f\n\n",
-       current_iteration_time, current_iteration_energy);
-
-  // Note the end of iteration
-  RC->end_iteration();
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/img_tensor_runtime.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/img_tensor_runtime.h
deleted file mode 100644
index 9c098719e52e31fcd06b6425964c8e1d48a15210..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/img_tensor_runtime.h
+++ /dev/null
@@ -1,26 +0,0 @@
-#ifndef IMG_TENSOR_RUNTIME_H
-#define IMG_TENSOR_RUNTIME_H
-
-// ***                        Runtime declaration                        *** //
-void* tensorFft(void* input);
-void* tensorReduce(void* input);
-void* tensorReductionSamplingReduce(void* input);
-void* tensorProjectiveT(void* input);
-void* tensorMap1(void* input);
-void* tensorMap2(void* input);
-void* tensorMap3(void* input);
-
-// ***                      Wrapper API declaration                      *** //
-void* wrapper_tensorFft(const char* hpvm_node_id, void* input);
-void* wrapper_tensorReduce(const char* hpvm_node_id, void* input);
-void* wrapper_tensorProjectiveT(const char* hpvm_node_id, void* input);
-void* wrapper_tensorMap1(const char* hpvm_node_id, void* input);
-void* wrapper_tensorMap2(const char* hpvm_node_id, void* input);
-void* wrapper_tensorMap3(const char* hpvm_node_id, void* input);
-
-// Tentative
-void* wrapper_tensorStencil(const char* hpvm_node_id, void* input);
-void* wrapper_tensorCosineT(const char* hpvm_node_id, void* input);
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/op_overheads.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/op_overheads.h
deleted file mode 100644
index ddd718515a358cc122120e1f418233b8cf5947c6..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/op_overheads.h
+++ /dev/null
@@ -1,229 +0,0 @@
-
-
-#ifndef OP_OVERHEADS_HEADER
-#define OP_OVERHEADS_HEADER
-
-
-#include <math.h>
-#include <sstream>
-#include "tensor.h"
-
-
-float scale_down_factor = 10000.0;
-std::string result_str = "";
-
-
-// TODO: Every routine needs testing
-
-static float scaleDownComps(double total_comps){
-
-  total_comps = total_comps / scale_down_factor;
-  return total_comps;
-}
-
-// private function
-static float getScaledComps(double total_comps, int error_scale, int factor_type){
-
-  // Default to the unscaled count so an unknown factor_type cannot return
-  // an uninitialized value
-  double scaled_comps = total_comps;
-
-  // Logarithmic error factor scaling - higher error, lower cost
-  if(factor_type == 1){
-    float error_factor = log2((float) error_scale + 3);
-    scaled_comps = total_comps / error_factor;
-  }
-  // Linear error factor scaling
-  else if(factor_type == 2){
-    scaled_comps = total_comps / (error_scale + 1);
-  }
-  // Quadratic error factor scaling (scaling down)
-  else if(factor_type == 3){
-    error_scale = (error_scale + 1) * (error_scale + 1);
-    scaled_comps = total_comps / error_scale;
-  }
-
-  return scaled_comps;
-}
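-
-// Worked example for the scaling above (hypothetical numbers): with
-// total_comps = 1200 and error_scale = 3,
-//   factor_type 1 (log):       1200 / log2(3 + 3)     ~= 464
-//   factor_type 2 (linear):    1200 / (3 + 1)          = 300
-//   factor_type 3 (quadratic): 1200 / ((3+1) * (3+1))  = 75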
-
-
-static void addNormToResult(float comps){
-
-  std::ostringstream ss;
-  ss << std::fixed << comps;
-  
-  result_str.append( std::string(ss.str()) );
-  result_str.append("\t");
-}
-
-
-
-static void addCompsToResult(float total_comps, float opt_comps1, float opt_comps2, float opt_comps3){
-
-  std::ostringstream ss;
-  ss << std::fixed << total_comps;
-  result_str.append( std::string(ss.str()) );
-  result_str.append("\t");
-
-  std::ostringstream ss2;
-  ss2 << std::fixed << opt_comps1;  
-  result_str.append( std::string(ss2.str()) );
-  result_str.append("\t");
-  
-  std::ostringstream ss3;
-  ss3 << std::fixed << opt_comps2;
-  result_str.append( std::string(ss3.str()) );
-  result_str.append("\t");
-
-  std::ostringstream ss4;
-  ss4 << std::fixed << opt_comps3;
-  result_str.append( std::string(ss4.str()) );
-  result_str.append("\n");
-}
-
-
-void dumpCompOverheads(double total_comps, int error_scale){
-
-  total_comps = scaleDownComps(total_comps);
-  
-  float scaled_comps1 = getScaledComps(total_comps, error_scale, 1); // Log scaling
-  float scaled_comps2 = getScaledComps(total_comps, error_scale, 2); // Linear scaling
-  float scaled_comps3 = getScaledComps(total_comps, error_scale, 3); // Quadratic scaling
- 
-  //INFO("error_scale = %d, total_comps = %f, scaled_comps = %f \n",
-  //	 error_scale, total_comps, scaled_comps1);
-
-  addCompsToResult(total_comps, scaled_comps1, scaled_comps2, scaled_comps3); 
-}
-
-
-
-void add_conv_overheads(void* input_ptr, void* filter_ptr,
-			int vertical_stride, int horizontal_stride,
-			int error_scale){
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-
-  double kernel_comps = filter->dims.dim_sizes[0] * filter->dims.dim_sizes[1] *
-    filter->dims.dim_sizes[2] * filter->dims.dim_sizes[3];
-
-  double H_in = input->dims.dim_sizes[2] / vertical_stride;
-  double W_in = input->dims.dim_sizes[3] / horizontal_stride;
-  double N_in = input->dims.dim_sizes[0]; // batch Dimension
-
-  double total_comps = N_in * H_in * W_in * kernel_comps;
-
-  dumpCompOverheads(total_comps, error_scale);
-    
-}
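-
-// Worked example (hypothetical shapes): a 64x3x3x3 filter gives
-// kernel_comps = 64*3*3*3 = 1728; with a 1x3x32x32 input and stride 1,
-// total_comps = 1 * 32 * 32 * 1728 = 1769472 before scaling.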
-
-
-void add_gemm_overheads(void* lhs_ptr, void* rhs_ptr, int error_scale){
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-    
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs last dimension must contain the neurons
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-  
-  // Flattening the dimensions after the batch dimension
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Dimension-note: Check if k is same across the two tensors
-  
-  //printf("m = %d, n = %d, k = %d \n", m, n, k);
-  
-  if(rhs_k != k){
-    printf("rhs=%d and lhs=%d columns/rows don't match \n", rhs_k, k);
-    abort();
-  }
-
-  double m_d = m;
-  double n_d = n;
-  double rhs_k_d = rhs_k;
-  
-  double total_comps = m_d * n_d * rhs_k_d * 1.0;
-  dumpCompOverheads(total_comps, error_scale);
-  
-}
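-
-// Worked example (hypothetical shapes): an lhs of [10, 512] and an rhs of
-// [512, 100] give m = 10, k = 512, n = 100, so
-// total_comps = 10 * 100 * 512 = 512000.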
-
-
-void add_bias_overheads(void* input_ptr, int error_scale){
-
-  Tensor* input = (Tensor*) input_ptr;  
-  double total_comps = input->num_elems;
-
-  dumpCompOverheads(total_comps, error_scale);
-}
-
-
-void add_relu_overheads(void* input_ptr, int error_scale){
-  
-  Tensor* input = (Tensor*) input_ptr;
-  double total_comps = input->num_elems;
-
-  dumpCompOverheads(total_comps, error_scale);
-}
-
-
-void add_pool_overheads(void* input_ptr, int kernel_size,
-			 int stride_size, int error_scale){
-
-  Tensor* input = (Tensor*) input_ptr;
-  
-  int num_dims = input->dims.num_dims;
-  double H = input->dims.dim_sizes[num_dims-2];
-  double W = input->dims.dim_sizes[num_dims-1];
-  double C = input->dims.dim_sizes[1]; // channel dimension
-  double N = input->dims.dim_sizes[0]; // batch dimension
-
-  H = H / stride_size;
-  W = W / stride_size;
-
-  double total_comps = N * C * H * W * kernel_size * kernel_size;
-
-  dumpCompOverheads(total_comps, error_scale);
-
-}
-
-
-void add_norms(void* norms_ptr, char* op_name, int error_value){
-
-  // Print operation name - {tensorAdd, tensorPool, tensorGemm}
-  result_str.append(op_name);
-  result_str.append("\t");
-  
-  addNormToResult(error_value);
-  
-  Norm_t* norms = (Norm_t*) norms_ptr;
-
-  addNormToResult(norms->mean_l1);
-  addNormToResult(norms->mean_l2);
-  addNormToResult(norms->orig_inf_norm);
-  
-  addNormToResult(norms->l1_norm);
-  addNormToResult(norms->l2_norm);
-  addNormToResult(norms->inf_norm);
-}
-
-
-void dump_result(char* file_name){
-
-  FILE* fp = fopen(file_name, "w+");
-  if(fp != NULL){
-    fwrite(result_str.c_str(), 1, result_str.length(), fp);
-    fclose(fp);
-  }
-  else{
-    ERROR("Could not create file \n");
-  }
-
-  result_str = "";
-}
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/profiling.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/profiling.h
deleted file mode 100644
index 7075d51cd60b610379536ddaa1c2255a8fc68de0..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/profiling.h
+++ /dev/null
@@ -1,95 +0,0 @@
-
-#ifndef PROFILING_HEADER
-#define PROFILING_HEADER
-
-
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <ctime>
-#include <chrono>
-#include <iostream>
-#include <map>
-#include <memory>
-#include <random>
-#include <sstream> // Needed for the std::stringstream used in profileEvent
-#include <string>
-#include <unordered_map>
-#include <cuda_runtime.h>
-
-#include "global_data.h"
-
-
-/***** Profiling routines ***/
-
-
-std::chrono::time_point<std::chrono::high_resolution_clock> start_time;
-// previous_time maintains time for the latest timed operation
-std::chrono::time_point<std::chrono::high_resolution_clock> previous_time;
-
-extern "C"{
-
-  void startProfiling(){
-    start_time = std::chrono::high_resolution_clock::now();
-  }
-
-  void stopProfiling(){
-    
-    FILE* fp = fopen("profile_data.txt", "w+");
-    if(fp != NULL){   
-      fwrite(profile_data.c_str(), 1, profile_data.length(), fp);
-      fclose(fp);
-    }
-    
-    profile_data = "";
-    func_counters.clear();
-  }
-
-
-  void profileEvent(const char* event_name, bool compare_previous = false){
-
-    checkCudaErrors(cudaDeviceSynchronize());
-
-    auto it = func_counters.find(event_name);
-    if(it == func_counters.end()){
-      func_counters[event_name] = 1; 
-    }
-    else{
-      int counter = func_counters[event_name];
-      counter++;
-      func_counters[event_name] = counter;
-    }
-
-    std::stringstream ss;
-    ss << func_counters[event_name];
-    std::string event_count = ss.str();
-
-  
-    std::chrono::time_point<std::chrono::high_resolution_clock> zero_time; 
-    std::chrono::time_point<std::chrono::high_resolution_clock> time_reading =
-      std::chrono::high_resolution_clock::now();
-    std::chrono::duration<double, std::ratio<1>> current_time =
-      time_reading - zero_time;
-  
-    INFO("AbsoluteTime, Event = %s, Time = %f \n", event_name, current_time.count());  
-    profile_data.append(event_name);
-    profile_data.append(event_count);
-    profile_data.append("\t");
-    profile_data.append(std::to_string(current_time.count()));
-  
-    if(compare_previous){
-      std::chrono::duration<double, std::ratio<1>> duration_time =
-	time_reading - previous_time;
-
-      profile_data.append("\t");
-      profile_data.append(std::to_string(duration_time.count()));
-      INFO("TimeDuration, Event = %s, Time = %f \n", event_name, duration_time.count());  
-    }
-
-    profile_data.append("\n");  
-  
-    previous_time = time_reading; // set the previous time reading to the current profiled time 
-  }
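-
-  // Typical usage sketch (event names are hypothetical):
-  //   startProfiling();
-  //   profileEvent("Conv1");           // logs an absolute timestamp
-  //   profileEvent("Conv1_end", true); // also logs the delta vs. the previous event
-  //   stopProfiling();                 // flushes profile_data to profile_data.txt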
-
-}
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/rt-controller-api.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/rt-controller-api.h
deleted file mode 100644
index b9fe649325b7da11d3e03db6e21cad386d56747b..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/rt-controller-api.h
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-extern "C"{
-  
-  void llvm_hpvm_initializeRuntimeController(const char *, const char *);
-  void llvm_hpvm_clearRuntimeController();
-  void llvm_hpvm_invokeRtControl(void* result, const char* str, int start, int end); 
-}
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor.h
deleted file mode 100644
index 0b16a51921eeb62bdfceb3540ff6b42c82626d72..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor.h
+++ /dev/null
@@ -1,78 +0,0 @@
-
-
-#ifndef TENSOR_HEADER
-#define TENSOR_HEADER
-
-#include <cuda_runtime.h>
-#include <device_launch_parameters.h>
-#include <cublas_v2.h>
-#include <cudnn.h>
-#include <cublas_api.h>
-#include <cuda_fp16.h>
-#include <driver_types.h>
-
-
-
-struct Norm_t{
-  float mean_l1;
-  float mean_l2;
-  float orig_inf_norm;
-  float l0_norm;
-  float l1_norm;
-  float l2_norm;
-  float inf_norm;
-};
-
-
-struct Dimension{
-  int num_dims;
-  size_t* dim_sizes;
-};
-
-enum data_location_t{
-  HOST,
-  DEVICE
-};
-
-
-struct Tensor{
-  int data_type;
-  int cur_type;
-  int data_format;
-  data_location_t data_placement; // Maintains the location of the tensor {host, device...} 
-  cudnnTensorDescriptor_t tensor_desc;
-  cudnnFilterDescriptor_t filter_desc; // FIXIT: Rethink if this should be in tensor struct
-  cudnnTensorDescriptor_t tensor_half_desc;
-  cudnnFilterDescriptor_t filter_half_desc; // FIXIT: Rethink if this should be in tensor struct
-  void* host_data;
-  void* gpu_data; // Pointer to GPU FP32 data
-  void* gpu_half_data; // Pointer to GPU FP16 data
-  size_t num_elems; // Total elements
-  size_t size_in_bytes; // Total size in bytes
-  struct Dimension dims;
-};
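-
-// Size example (hypothetical shape): an NCHW activation of [N=1, C=3, H=32, W=32]
-// has num_elems = 1*3*32*32 = 3072; with float_type, its device allocation is
-// size_in_bytes = 3072 * 4 = 12288 bytes.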
-
-
-
-struct Range{
-  float min;
-  float max;
-};
-
-
-// NOTE: Currently only NCHW is supported due to limited cuDNN support
-enum Tensor_format_t{
-  nchw,
-  nhwc 
-};
-
-enum Tensor_type_t{
-  float_type,
-  double_type,
-  half_type,
-  int_type
-};
-
-
-#endif
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu.h
deleted file mode 100644
index 07fb766493a8ddeccc90db60c4345dda7889e193..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu.h
+++ /dev/null
@@ -1,39 +0,0 @@
-
-
-#ifndef TENSOR_HEADER
-#define TENSOR_HEADER
-
-
-struct Dimension{
-  int num_dims;
-  size_t* dim_sizes;
-};
-
-
-struct Tensor{
-  int data_type;
-  int data_format;
-  void* host_data;
-  void* gpu_data; // Pointers should not be device specific - Think: Better design
-  size_t num_elems; // Total elements
-  size_t size_in_bytes; // Total size in bytes
-  struct Dimension dims;
-};
-
-
-enum Tensor_format_t{
-  nchw,
-  nhwc 
-};
-
-enum Tensor_type_t{
-  float_type,
-  double_type,
-  half_type,
-  int_type
-};
-
-
-
-#endif
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu_runtime.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu_runtime.h
deleted file mode 100644
index 42969d27712d92c451ae066a44fac0c03cedf170..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_cpu_runtime.h
+++ /dev/null
@@ -1,96 +0,0 @@
-
-#include <stdio.h>
-#include <cstdlib>
-#include <cmath>
-#include <memory>
-#include <string>
-
-
-#ifndef CUDNN_HEADER
-#define CUDNN_HEADER
-
-
-extern "C"{
-  /****  Initialization Routine - Must be inserted at program start (in the backend)  ****/
-  void llvm_hpvm_initTensorRt(int gpuid = 0);
-  void llvm_hpvm_cleanupTensorRt();
-
-  // Routine for moving tensor data (to and from the GPU and CPU)
-  void hpvm_request_tensor(void* tensor, int destination);
-
-
-  // NOTE: Currently only using 4-D tensors - 2D and 3D tensors not supported for cuDNN operations
-  // NOTE: The only data format supported as of now is: NCHW (batch_dimension, channels, Height, Width)
-  void* create4DTensor(int data_type, int data_format, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size, size_t dim4_size);
-  
-  void initTensorData(void* tensor, void* data_ptr, size_t size_in_bytes);
-
-  /********** Tensor Operation API ******/
-
-  // NOTE: For conv_mode, only value '1' is supported
-  void* tensorConvolutionCPU(void* input, void* filter,
-			     int vertical_pad, int horizontal_pad,
-			     int vertical_stride, int horizontal_stride,
-			     int conv_mode, int compute_precision);
-
-  void* tensorPoolingCPU(void* input,
-			 int poolFunction,
-			 int window_height, int window_width,
-			 int vertical_pad, int horizontal_pad,
-			 int vertical_stride, int horizontal_stride);
-
-  void* tensorGemmCPU(void* lhs, void* rhs);
-
-  void* tensorAddCPU(void* x, void* bias);
-
-  void* tensorReluCPU(void* input);
-
-  void* tensorRelu2CPU(void* input, float min, float max);
-  
-  void* tensorTanhCPU(void* input);
-  
-  void* tensorSoftmaxCPU(void* input);
-    
-}
-
-
-/*
-void dummyFunction(){
-
-  void* initRT = (void*) &llvm_hpvm_initTensorRt;
-  void* cleanRT = (void*) &llvm_hpvm_cleanupTensorRt;
-  void* request_tensorPtr = (void*) &hpvm_request_tensor;
-  void* startProf = (void*) &startProfiling;
-  void* stopProf = (void*) &stopProfiling;
-  void* create2Dptr = (void*) &create2DTensor;
-  void* create3Dptr = (void*) &create3DTensor;
-  void* create4Dptr = (void*) &create4DTensor;
-  void* initTensorPtr = (void*) &initTensorData;
-  void* tensorSplitPtr = (void*) &tensorSplit;
-  void* tensorConcatPtr = (void*) &tensorConcat;
-  void* tensorConvPtr = (void*) &tensorConvolution;
-  void* tensorHConvPtr = (void*) &tensorHalfConvolution;
-  void* tensorPoolPtr = (void*) &tensorPooling;
-  void* tensorHalfPoolPtr = (void*) &tensorHalfPooling;
-  void* tensorLRNPtr = (void*) &tensorLRN;
-  void* tensorGemmPr = (void*) &tensorGemm;
-  void* tensorGemmCPUPtr = (void*) &tensorGemmCPU;
-  void* tensorGemmGPUPtr = (void*) &tensorGemmGPU;
-  void* tensorHgemmPtr = (void*) &tensorHalfGemm;
-  void* tensorGemmBiasPtr = (void*) &tensorGemmBias;
-  void* tensorAddPtr = (void*) &tensorAdd;
-  void* tensorHalfAddPtr = (void*) &tensorHalfAdd;
-  void* tensorReluPtr = (void*) &tensorRelu;
-  //FIXME: --void* tensorHalfReluPtr = (void*) &tensorHalfRelu;
-  void* tensorRelu2Ptr = (void*) &tensorRelu2;
-  void* tensorHalfRelu2Ptr = (void*) &tensorHalfRelu2;
-  void* tensorTanhPtr = (void*) &tensorTanh;
-  void* tensorHalfTanhPtr = (void*) &tensorHalfTanh;
-  void* tensorSoftmaxPtr = (void*) &tensorSoftmax;
-  void* tensorAddErrorPtr = (void*) &tensorAddError;    
-}
-*/
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.cc b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.cc
deleted file mode 100644
index 2216172eab78414b46814e0d457908f5584c606a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.cc
+++ /dev/null
@@ -1,114 +0,0 @@
-
-#include <stdio.h>
-#include <cstdlib>
-#include <cmath>
-#include <memory>
-#include <string>
-
-
-#ifndef CUDNN_HEADER
-#define CUDNN_HEADER
-
-
-extern "C"{
-  /****  Initialization Routine - Must be inserted at program start (in the backend)  ****/
-  void llvm_hpvm_initTensorRt(int gpuid = 0);
-  void llvm_hpvm_cleanupTensorRt();
-
-  // Routine for moving tensor data (to and from the GPU and CPU)
-  void hpvm_request_tensor(void* tensor, int destination);
-
-  /****** Profiling API - defines profiling scope */
-  void startProfiling();
-  void stopProfiling();
-
-  /****** Routines for tensor creation and initialization *******/
-  void* create2DTensor(int data_type, size_t dim1_size, size_t dim2_size);
-  void* create3DTensor(int data_type, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size);
-
-  // NOTE: Currently only using 4-D tensors - 2D and 3D tensors not supported for cuDNN operations
-  // NOTE: The only data format supported as of now is: CUDNN_NCHW
-  void* create4DTensor(int data_type, int data_format, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size, size_t dim4_size);
-  void initTensorData(void* tensor, void* data_ptr, size_t size_in_bytes);
-
-  /********** Tensor Operation API ******/
-
-  void** tensorSplit(void* tensor, int num_splits, int split_dim);
-  void* tensorConcat(void** tensors, int num_splits, int split_dim);
-
-  // NOTE: For conv_mode, only value '1' is supported
-  void* tensorConvolution(void* input, void* filter,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride,
-			  int conv_mode, int compute_precision);
-  void* tensorHConvolution(void* input, void* filter,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride,
-			  int conv_mode, int compute_precision);
-
-  void* tensorPooling(void* input,
-		      int poolFunction,
-		      int window_height, int window_width,
-		      int vertical_pad, int horizontal_pad,
-		      int vertical_stride, int horizontal_stride);
-
-  void* tensorLRN(void* input, unsigned int LRN_window,
-		  double LRN_alpha, double LRN_beta, double LRN_k);
-
-
-  /* 4 different Gemm versions */
-  void* tensorGemm(void* lhs, void* rhs);
-  void* tensorGemmCPU(void* lhs, void* rhs);
-  void* tensorGemmGPU(void* lhs, void* rhs);
-  void* tensorHgemm(void* lhs, void* rhs);
-
-  
-  // NOTE: In-place operation
-  void* tensorGemmBias(void* input, void* bias);
-  // NOTE: In place operation
-  void* tensorAdd(void* x, void* bias);
-  // NOTE: In-place operation
-  void* tensorRelu(void* input);
-  // NOTE: In-place operation
-  void* tensorSoftmax(void* input);
-
-  /* Error injection API - used for accuracy tuning */
-  void* tensorAddError(void* x_ptr);  
-}
-
-
-
-void emptyFunction(){
-
-  void* initRT = (void*) &llvm_hpvm_initTensorRt;
-  void* cleanRT = (void*) &llvm_hpvm_cleanupTensorRt;
-  void* request_tensorPtr = (void*) &hpvm_request_tensor;
-  void* startProf = (void*) &startProfiling;
-  void* stopProf = (void*) &stopProfiling;
-  void* create2Dptr = (void*) &create2DTensor;
-  void* create3Dptr = (void*) &create3DTensor;
-  void* create4Dptr = (void*) &create4DTensor;
-  void* initTensorPtr = (void*) &initTensorData;
-  void* tensorSplitPtr = (void*) &tensorSplit;
-  void* tensorConcatPtr = (void*) &tensorConcat;
-  void* tensorConvPtr = (void*) &tensorConvolution;
-  void* tensorHConvPtr = (void*) &tensorHConvolution;
-  void* tensorPoolPtr = (void*) &tensorPooling;
-  void* tensorLRNPtr = (void*) &tensorLRN;
-  void* tensorGemmPr = (void*) &tensorGemm;
-  void* tensorGemmCPUPtr = (void*) &tensorGemmCPU;
-  void* tensorGemmGPUPtr = (void*) &tensorGemmGPU;
-  void* tensorHgemmPtr = (void*) &tensorHgemm;
-  void* tensorGemmBiasPtr = (void*) &tensorGemmBias;
-  void* tensorAddPtr = (void*) &tensorAdd;
-  void* tensorReluPtr = (void*) &tensorRelu;
-  void* tensorSoftmaxPtr = (void*) &tensorSoftmax;
-  void* tensorAddErrorPtr = (void*) &tensorAddError;
-    
-}
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.h
deleted file mode 100644
index 06c492c9e8fb45e0a51de153e8cf434a79a50e23..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_runtime.h
+++ /dev/null
@@ -1,229 +0,0 @@
-
-
-#include <stdio.h>
-#include <cstdlib>
-#include <cmath>
-#include <memory>
-#include <string>
-#include "approx_api.h"
-#include "testing_api.h"
-#include "tensor.h"
-#include "rt-controller-api.h"
-
-#include "img_tensor_runtime.h"
-
-#ifndef CUDNN_HEADER
-#define CUDNN_HEADER
-
-
-extern "C"{
-  /****  Initialization Routine - Must be inserted at program start (in the backend)  ****/
-  void llvm_hpvm_initTensorRt(int gpuid = 0);
-  void llvm_hpvm_cleanupTensorRt();
-
-  void llvm_hpvm_initApproxhpvmRt(int gpuid = 0);
-  void llvm_hpvm_cleanupApproxhpvmRt();
-
-  // Routine for moving tensor data (to and from the GPU and CPU)
-  void hpvm_request_tensor(void* tensor, int destination);
-
-  /****** Profiling API - defines profiling scope */
-  void startProfiling();
-  void stopProfiling();
-
-  /****** Routines for tensor creation and initialization *******/
-  void* create2DTensor(int data_type, size_t dim1_size, size_t dim2_size);
-  void* create3DTensor(int data_type, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size);
-
-  // NOTE: Currently only using 4-D tensors - 2D and 3D tensors not supported for cuDNN operations
-  // NOTE: The only data format supported as of now is: CUDNN_NCHW
-  void* create4DTensor(int data_type, int data_format, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size, size_t dim4_size);
-  void initTensorData(void* tensor, void* data_ptr, size_t size_in_bytes);
-
-  void changeTensorPlacement(struct Tensor* tensor,
-			     data_location_t data_placement);
- 
-  void tensorCopy(void* srcTensor, void* dstTensor);
-  
-  void freeTensor(void*);
-
-  /********** Tensor Operation API ******/
-
-  void** tensorSplit(void* tensor, int num_splits, int split_dim);
-  void* tensorConcat(void** tensors, int num_splits, int split_dim);
-
-  // NOTE: For conv_mode, only value '1' is supported
-  void* tensorConvolution(void* input, void* filter,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride,
-			  int conv_mode, int conv_groups);
-  void* tensorHalfConvolution(void* input, void* filter,
-			      int vertical_pad, int horizontal_pad,
-			      int vertical_stride, int horizontal_stride,
-			      int conv_mode, int conv_groups);
-
-  void* tensorPooling(void* input,
-		      int poolFunction,
-		      int window_height, int window_width,
-		      int vertical_pad, int horizontal_pad,
-		      int vertical_stride, int horizontal_stride);
-
-  void* tensorHalfPooling(void* input,
-			  int poolFunction,
-			  int window_height, int window_width,
-			  int vertical_pad, int horizontal_pad,
-			  int vertical_stride, int horizontal_stride);
-
-  
-  void* tensorLRN(void* input, unsigned int LRN_window,
-		  double LRN_alpha, double LRN_beta, double LRN_k);
-
-
-  /* 4 different Gemm versions */
-  void* tensorGemm(void* lhs, void* rhs);
-  void* tensorGemmCPU(void* lhs, void* rhs);
-  void* tensorGemmGPU(void* lhs, void* rhs); // , void* result_tensor = NULL);
-  void* tensorHalfGemmGPU(void* lhs, void* rhs);
-  void* tensorHalfGemm(void* lhs, void* rhs);
-
-  
-  // NOTE: In-place operation
-  void* tensorGemmBias(void* input, void* bias);
-  // NOTE: In place operation
-  void* tensorAdd(void* x, void* bias);
-  // NOTE: In place operation
-  void* tensorHalfAdd(void* x, void* bias);
-  // NOTE: In-place operation
-  void* tensorRelu(void* input);
-  // NOTE: In-place operation
-  void* tensorHalfRelu(void* input);
-  // NOTE: In-place operation
-  
-  void* tensorTanh(void* input);
-  // NOTE: In-place operation
-  void* tensorHalfTanh(void* input);
-
-  // NOTE: In-place operation
-  void* tensorRelu2(void* input, float min, float max);
-  // NOTE: In-place operation
-  void* tensorHalfRelu2(void* input, float min, float max);
-  // NOTE: In-place operation
-  void* tensorSoftmax(void* input);
-
-  // NOTE: In-place operation
-  void* tensorBatchNorm(void* input_ptr, void* gamma_ptr, void* beta_ptr,
-			void* mean_ptr, void* variance_ptr, double epsilon);
-
-  void* tensorHalfBatchNorm(void* input_ptr, void* gamma_ptr, void* beta_ptr,
-			    void* mean_ptr, void* variance_ptr, double epsilon);
-
-  
-  /* Error injection API - used for accuracy tuning */
-  void* tensorAddError(void* x_ptr, int error_scale);
-  
-  void* tensorGemmModel(void* lhs, void* rhs);
-
-  /*** Error Injection API End **/
-
-
-  /****  PROMISE API *****/
-
-  /*************  
-  --- Synopsis:
-
-  input:  input activation tensor
-  filter: filter tensor
-  bias:  bias tensor
-  conv_pad_h, conv_pad_w:  convolution padding in height and width
-  conv_stride_h, conv_stride_w: convolution stride - vertical and horizontal
-  pool_id: {0, 1}    0: max_pooling ,   1: avg_pooling
-  pool_size: Size of pooling window. Note: Pass '0' for *NO* Pooling
-  activation_id: {-1,0,1,2}   -1: NO Activation, 0: Tanh, 1: Relu, 2: ClippedRelu
-  Swing: PROMISE swing level
-  
-  *************/
-  
-  void* ConvLayer_PROMISE(void* input, float i_min, float i_max,
-			  void* filter, float w_min, float w_max,
-			  void* bias, float b_min, float b_max,
-			  int conv_pad_h, int conv_pad_w, int conv_stride_h, int conv_stride_w,
-			  int pool_id, int pool_size,
-			  int activation_id, // Relu, Tanh, ClipRelu
-			  float out_min, float out_max, int swing); // NOTE: min_val, max_val apply to 'ClippedRelu'
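-
-  // Illustrative call (tensor handles and value ranges below are hypothetical):
-  // a convolution with pad 1 and stride 1, followed by 2x2 max pooling
-  // (pool_id 0), ReLU (activation_id 1), and PROMISE swing level 4:
-  //
-  //   void* out = ConvLayer_PROMISE(input, -1.0, 1.0,
-  //                                 filter, -0.5, 0.5,
-  //                                 bias, -0.1, 0.1,
-  //                                 1, 1, 1, 1,
-  //                                 0, 2,
-  //                                 1,
-  //                                 0.0, 0.0, 4);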
-
-
-  void* FCLayer_PROMISE(void* input, float i_min, float i_max,
-			void* weights, float w_min, float w_max,
-			void* bias, float b_min, float b_max,
-			int activation_id,
-			float out_min, float out_max, int swing); // NOTE: min_val, max_val apply to 'ClippedRelu'
-
-
-  /**** Wrapper Runtime API ***/
-  
-  void* wrapper_ConvLayer(const char* hpvm_node_id,
-			  void* input, 
-			  void* filter, 
-			  void* bias, 
-			  int conv_pad_h, int conv_pad_w,
-			  int conv_stride_h, int conv_stride_w,
-			  int pool_id, int pool_size,
-			  int activation_id, // Relu, Tanh, ClipRelu
-			  float out_min, float out_max);  
-
-
-  void* wrapper_FCLayer(const char* hpvm_node_id,
-			void* input, 
-		        void* weights, 
-		        void* bias, 
-		        int activation_id,
-		        float out_min, float out_max);
-
-  
-  void* wrapper_tensorGroupConvolution(const char* hpvm_node_id, void* input, void* filter,
-				       int vertical_pad, int horizontal_pad,
-				       int vertical_stride, int horizontal_stride,
-				       int conv_mode, int conv_groups);
-
-
-  void* wrapper_tensorRelu(const char* hpvm_node_id, void* input_ptr);
-  
-  void* wrapper_tensorTanh(const char* hpvm_node_id, void* input_ptr);
-  
-  void* wrapper_tensorBatchNorm(const char* hpvm_node_id,
-				void* input_ptr, void* gamma_ptr, void* beta_ptr,
-			        void* mean_ptr, void* variance_ptr, double epsilon);
-  
-  void* wrapper_tensorAdd(const char* hpvm_node_id, void* input_ptr, void* bias_ptr);
-  
-
-  void* wrapper_tensorPooling(const char* hpvm_node_id,
-			      void* input_ptr,
-			      int poolFunction,
-			      int window_height, int window_width,
-			      int vertical_pad, int horizontal_pad,
-			      int vertical_stride, int horizontal_stride);
-
-
-  void* wrapper_tensorSoftmax(const char* hpvm_node_id, void* input_ptr);
-
-  
-}
-
-
-void dumpAccuracyNorms();
-void readOpenTunerFlags(const char* file_name);
-void clearOpCounter();
-void clearTensorMap();
-void startMemTracking();
-void freeOutputTensors();
-void freeBatchMemory();
-void* quantizeTensorPromise(void* input_ptr, float min, float max);
-void* addPromiseError(void* x_ptr, int error_scale);
-void readSkipTensors(int* skip_tensor_ids, int op_count);
-
-
-
-#endif
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_signatures.cc b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_signatures.cc
deleted file mode 100644
index e8947881765637d68ca9d95d716c97d486e8380a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_signatures.cc
+++ /dev/null
@@ -1,58 +0,0 @@
-
-#include "tensor_runtime.h"
-
-
-void dummyFunction(){
-
-  void* initRT = (void*) &llvm_hpvm_initTensorRt;
-  void* cleanRT = (void*) &llvm_hpvm_cleanupTensorRt;
-
-  void* initApproxRT = (void*) &llvm_hpvm_initApproxhpvmRt;
-  void* cleanApproxRT = (void*) &llvm_hpvm_cleanupApproxhpvmRt;
-
-  void* initRTController = (void*) &llvm_hpvm_initializeRuntimeController;
-  void* cleanRTController = (void*) &llvm_hpvm_clearRuntimeController;
-  
-  void* request_tensorPtr = (void*) &hpvm_request_tensor;
-  void* startProf = (void*) &startProfiling;
-  void* stopProf = (void*) &stopProfiling;
-  void* create2Dptr = (void*) &create2DTensor;
-  void* create3Dptr = (void*) &create3DTensor;
-  void* create4Dptr = (void*) &create4DTensor;
-  void* initTensorPtr = (void*) &initTensorData;
-  void* tensorSplitPtr = (void*) &tensorSplit;
-  void* tensorConcatPtr = (void*) &tensorConcat;
-  void* tensorConvPtr = (void*) &tensorConvolution;
-  void* tensorHConvPtr = (void*) &tensorHalfConvolution;
-  void* tensorPoolPtr = (void*) &tensorPooling;
-  void* tensorHalfPoolPtr = (void*) &tensorHalfPooling;
-  void* tensorLRNPtr = (void*) &tensorLRN;
-  void* tensorGemmPr = (void*) &tensorGemm;
-  void* tensorGemmCPUPtr = (void*) &tensorGemmCPU;
-  void* tensorGemmGPUPtr = (void*) &tensorGemmGPU;
-  void* tensorHgemmPtr = (void*) &tensorHalfGemm;
-  void* tensorGemmBiasPtr = (void*) &tensorGemmBias;
-  void* tensorAddPtr = (void*) &tensorAdd;
-  void* tensorHalfAddPtr = (void*) &tensorHalfAdd;
-  void* tensorReluPtr = (void*) &tensorRelu;
-  //FIXME: --void* tensorHalfReluPtr = (void*) &tensorHalfRelu;
-  void* tensorRelu2Ptr = (void*) &tensorRelu2;
-  void* tensorHalfRelu2Ptr = (void*) &tensorHalfRelu2;
-  void* tensorTanhPtr = (void*) &tensorTanh;
-  void* tensorHalfTanhPtr = (void*) &tensorHalfTanh;
-  void* tensorSoftmaxPtr = (void*) &tensorSoftmax;
-  void* tensorBatchNormPtr = (void*) &tensorBatchNorm;
-  void* tensorAddErrorPtr = (void*) &tensorAddError;
-  void* ConvLayer = (void*) &ConvLayer_PROMISE;
-  void* FCLayer = (void*) &FCLayer_PROMISE;
-  
-  void* ConvLayer2 = (void*) &wrapper_ConvLayer;
-  void* FCLayer2 = (void*) &wrapper_FCLayer;
-  void* AddWrapper = (void*) &wrapper_tensorAdd;
-  void* ReluWrapper = (void*) &wrapper_tensorRelu;    
-  void* TanhWrapper = (void*) &wrapper_tensorTanh;
-  void* BatchNormWrapper = (void*) &wrapper_tensorBatchNorm;    
-  void* PoolingWrapper = (void*) &wrapper_tensorPooling;    
-  void* softmaxWrapper = (void*) &wrapper_tensorSoftmax;    
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_utils.cu b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_utils.cu
deleted file mode 100644
index c65fd8b0163b80459476b4ab3c61ce48580cc0ff..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/tensor_utils.cu
+++ /dev/null
@@ -1,575 +0,0 @@
-
-#ifndef TENSOR_UTILS_HEADER
-#define TENSOR_UTILS_HEADER
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-#include <cstring>
-#include <cmath>
-#include <ctime>
-#include <cfloat>
-#include <algorithm>
-#include <chrono>
-#include <iomanip>
-#include <iostream>
-#include <map>
-#include <memory>
-#include <random>
-#include <sstream>
-#include <string>
-
-#include <cuda_runtime.h>
-#include <device_launch_parameters.h>
-
-#include <cublas_v2.h>
-#include <cudnn.h>
-#include <cublas_api.h>
-#include <vector>
-
-//#include "../../dnn_sources/include/types.h"
-#include "tensor_runtime.h"
-#include "debug.h"
-#include "tensor.h"
-#include "global_data.h"
-#include "fp16_gemm.cu"
-
-
-// used to map HPVM tensors to runtime tensors (with extra runtime-specific information)
-std::vector<Tensor*> tensorsArr;
-int total_tensors = 0;
-
-
-
-/*void addRuntimeTensor(struct Tensor* hpvm_tensor, struct Tensor* tensor){
-  
-  hpvm_tensor->tensor_id = total_tensors;
-  total_tensors++;      
-  tensorsArr.push_back(tensor);
-}
-
-
-struct Tensor* getRuntimeTensor(struct Tensor* hpvm_tensor){
-  int tensor_id = hpvm_tensor->tensor_id;
-  if(tensor_id < total_tensors)
-    return tensorsArr[tensor_id];
-  else
-    ERROR("Tensor not found in runtime. Aborting ...");
-  return NULL;
-}
-*/
-
-
-/*void freeCudaPtr(void* cuda_ptr){
-
-  int i;
-  for(i = 0; i < tensors_ptr.size(); i++){
-    if(tensors_ptr[i] == cuda_ptr)
-      break;   
-  }
-
-  tensors_ptr.erase(tensors_ptr.begin()+i);
-  cudaFree(cuda_ptr);
-}
-*/
-
-
-void freeTensor(void* tensor_ptr){
-
-  Tensor* tensor = (Tensor*) tensor_ptr;
-
-  for(int i = 0; i < tensors_ptr.size(); i++){
-    if(tensors_ptr[i] == tensor->gpu_data)
-      tensors_ptr.erase(tensors_ptr.begin()+i);   
-  }
-
-  for(int i = 0; i < tensors_ptr.size(); i++){
-    if(tensors_ptr[i] == tensor->gpu_half_data)
-      tensors_ptr.erase(tensors_ptr.begin()+i);   
-  }
-
-  for(int i = 0; i < host_ptr.size(); i++){
-    if(host_ptr[i] == tensor->host_data)
-      host_ptr.erase(host_ptr.begin()+i);   
-  }
-
-  for(int i = 0; i < obj_ptr.size(); i++){
-    if(obj_ptr[i] == tensor_ptr)
-      obj_ptr.erase(obj_ptr.begin()+i);   
-  }
-
-  
-  cudaFree(tensor->gpu_data);
-  cudaFree(tensor->gpu_half_data);
-
-  tensor->gpu_data = NULL;
-  tensor->gpu_half_data = NULL;
-  free(tensor->host_data);
-  tensor->host_data = NULL;
-  
-}
-
-
-// Returns the size of the target cudnn datatype
-int getTypeSize(int data_type){
-  if(data_type == CUDNN_DATA_FLOAT)
-    return 4;
-
-  if(data_type == CUDNN_DATA_HALF)
-    return 2;
-
-  INFO("Given type = %d, expected type = %d \n", data_type, CUDNN_DATA_FLOAT);
-  return 1;
-  // TODO: Add support for more data types
-}
-
-
-void setSizeInBytes(struct Tensor* tensor, int data_type, size_t num_elems){
-  int type_size = getTypeSize(data_type);
-  size_t size_in_bytes = type_size * num_elems;
-  tensor->size_in_bytes = size_in_bytes;
-
-  DEBUG("***--- size_in_bytes = %lu \n", size_in_bytes);
-}
-
-
-// NOTE: Always allocates FP32 on Host, FP32/FP16 for Device (GPU)
-void allocateMem(struct Tensor* tensor, int data_type, size_t num_elems){
-  setSizeInBytes(tensor, data_type, num_elems);
-  tensor->data_type = data_type;
-  tensor->cur_type = data_type; // type maintained for handling FP32 <-> FP16 conversions
-  tensor->num_elems = num_elems;
-  
-  size_t size_on_host = num_elems * 4; // NOTE: On host, always FP32
-  tensor->host_data = (void*) malloc(size_on_host); // Allocate memory on the host
-  tensor->data_placement = HOST; // By default data is on the host
-  
-  //printf("Allocating Sizes = %lu \n", tensor->size_in_bytes);
-
-  if(data_type == float_type){
-    checkCudaErrors(cudaMalloc(&tensor->gpu_data, tensor->size_in_bytes)); // Allocate FP32
-    tensor->gpu_half_data = NULL;
-  }
-  else{
-    checkCudaErrors(cudaMalloc(&tensor->gpu_half_data, tensor->size_in_bytes)); // Allocate FP16
-    tensor->gpu_data = NULL;
-  }
-  
-  tensors_ptr.push_back(tensor->gpu_data);
-  tensors_ptr.push_back(tensor->gpu_half_data);
-
-  tracked_tensors[tensor] = 1; // For FP16-FP32 data handling
-  
-  host_ptr.push_back(tensor->host_data);
-  obj_ptr.push_back(tensor);
-  //host_ptr.push_back(tensor->host_data); 
-}
-
-
-void setCudnnDataFormat(struct Tensor* tensor, int data_format){
-
-  switch(data_format){
-  case 0:
-    data_format = CUDNN_TENSOR_NCHW; break;
-  case 1:
-    data_format = CUDNN_TENSOR_NHWC; break;
-  
-  default:
-    break;
-  }
-
-  tensor->data_format = data_format;
-  DEBUG("tensor->data_format = %d \n", tensor->data_format);
-}
-
-
-void set4DFilterDescriptor(struct Tensor* tensor, int data_format, size_t dim1_size,
-			   size_t dim2_size, size_t dim3_size, size_t dim4_size){
-
-  setCudnnDataFormat(tensor, data_format);
-  
-  checkCUDNN(cudnnCreateFilterDescriptor(&tensor->filter_desc));
-
-  checkCUDNN(cudnnCreateFilterDescriptor(&tensor->filter_half_desc));
-
-  
-  checkCUDNN(cudnnSetFilter4dDescriptor(tensor->filter_desc,
-					(cudnnDataType_t) CUDNN_DATA_FLOAT, //tensor->data_type,
-					(cudnnTensorFormat_t) tensor->data_format,
-					dim1_size,
-					dim2_size, 
-					dim3_size,
-					dim4_size));
-
-  checkCUDNN(cudnnSetFilter4dDescriptor(tensor->filter_half_desc,
-					(cudnnDataType_t) CUDNN_DATA_HALF,
-					(cudnnTensorFormat_t) tensor->data_format,
-					dim1_size,
-					dim2_size, 
-					dim3_size,
-					dim4_size));  
-
-}
-
-
-
-void set4DTensorDescriptor(struct Tensor* tensor, int data_format, size_t dim1_size,
-			   size_t dim2_size, size_t dim3_size, size_t dim4_size){
-
-  setCudnnDataFormat(tensor, data_format);
-
-  checkCUDNN(cudnnCreateTensorDescriptor(&tensor->tensor_desc));
-
-  checkCUDNN(cudnnCreateTensorDescriptor(&tensor->tensor_half_desc));
-
-  // For certain operations, the strides may need to change - in which case the descriptor
-  // needs to be reinitialized
-  cudnnSetTensor4dDescriptor(tensor->tensor_desc,
-			     (cudnnTensorFormat_t) tensor->data_format, // Data format
-			     (cudnnDataType_t) CUDNN_DATA_FLOAT, //tensor->data_type, // Data type
-			     dim1_size, dim2_size, 
-			     dim3_size, dim4_size);
-
-
-  cudnnSetTensor4dDescriptor(tensor->tensor_half_desc,
-			     (cudnnTensorFormat_t) tensor->data_format, // Data format
-			     (cudnnDataType_t) CUDNN_DATA_HALF, // Data type
-			     dim1_size, dim2_size, 
-			     dim3_size, dim4_size);
-
-  
-  cudnnDataType_t dType;
-  int nStride, cStride, hStride, wStride;
-  int size1, size2, size3, size4;
-  cudnnGetTensor4dDescriptor(tensor->tensor_desc,
-  			     &dType,
-  			     &size1, &size2, &size3, &size4,
-  			     &nStride, &cStride, &hStride, &wStride);
-			   
-  INFO("nStride = %d, cStride = %d, hStride = %d, wStride = %d \n",
-  	 nStride, cStride, hStride, wStride);
-}
-
-
-// FIXIT: Striding still not working - hence 2D and 3D tensor support is missing
-void setTensorDescriptor(struct Tensor* tensor, int num_dims,
-			 size_t* dim_sizes){
-
-  checkCUDNN(cudnnCreateTensorDescriptor(&tensor->tensor_desc));
-
-  int* strides = (int*) malloc(sizeof(int) * num_dims);
-  strides[num_dims - 1] = 1;
-  for(int i = num_dims - 2; i >= 0; i--){
-    strides[i] = strides[i+1] * dim_sizes[i+1];
-  }
-
-  for(int i = 0; i < num_dims; i++){
-    INFO("strides[%d] = %d \n", i, strides[i]);
-  }
-
-  int* const_dims = (int*) malloc(sizeof(int) * num_dims);
-  for(int j = 0 ; j < num_dims; j++){
-    const_dims[j] = (int) dim_sizes[j];
-    INFO("const_dim = %d \n", const_dims[j]);
-  }
-  
-  INFO("data_type = %d, cuDNN_value = %d \n", tensor->data_type, CUDNN_DATA_FLOAT); 
-  // For certain operations, the strides may need to change - in which case the descriptor
-  // needs to be reinitialized
-  checkCUDNN(cudnnSetTensorNdDescriptor(tensor->tensor_desc,
-					(cudnnDataType_t) tensor->data_type, // Data type
-					num_dims,
-					(const int*) const_dims,
-					(const int*) strides));
-
-  // cuDNN copies the dimension/stride arrays, so the scratch buffers can be freed
-  free(strides);
-  free(const_dims);
-}
-
-
-
-
-extern "C"{
-
-  void* create2DTensor(int data_type, size_t dim1_size, size_t dim2_size){
-    struct Tensor* tensor = (struct Tensor*) malloc(sizeof(Tensor));
-    size_t num_elems = dim1_size * dim2_size;
-    allocateMem(tensor, data_type, num_elems);
-    // Setting the tensor dimensions  
-    size_t* dim_sizes = (size_t*) malloc(sizeof(size_t) * 2);
-    dim_sizes[0] = dim1_size;
-    dim_sizes[1] = dim2_size;
-    tensor->dims.dim_sizes = dim_sizes;
-    tensor->dims.num_dims = 2;
-  
-    return tensor;
-  }
-
-
-  void* create3DTensor(int data_type, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size){
-    struct Tensor* tensor = (struct Tensor*) malloc(sizeof(Tensor));
-    size_t num_elems = dim1_size * dim2_size * dim3_size;
-    allocateMem(tensor, data_type, num_elems);
-    // Setting the tensor dimensions  
-    size_t* dim_sizes = (size_t*) malloc(sizeof(size_t) * 3);
-    dim_sizes[0] = dim1_size;
-    dim_sizes[1] = dim2_size;
-    dim_sizes[2] = dim3_size;
-    tensor->dims.dim_sizes = dim_sizes;
-    tensor->dims.num_dims = 3;
-
-    return tensor;
-  }
-
-
-  void* create4DTensor(int data_type, int data_format, size_t dim1_size, size_t dim2_size,
-		       size_t dim3_size, size_t dim4_size){
-    struct Tensor* tensor = (struct Tensor*) malloc(sizeof(Tensor));
-    size_t num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-    allocateMem(tensor, data_type, num_elems);
-    // Setting the tensor dimensions  
-    size_t* dim_sizes = (size_t*) malloc(sizeof(size_t) * 4);
-    dim_sizes[0] = dim1_size;
-    dim_sizes[1] = dim2_size;
-    dim_sizes[2] = dim3_size;
-    dim_sizes[3] = dim4_size;
-    tensor->dims.dim_sizes = dim_sizes;
-    tensor->dims.num_dims = 4;
-    // Done setting tensor dimensions  
-    //setTensorDescriptor(tensor, 4, dim_sizes);
-    set4DTensorDescriptor(tensor, data_format, dim1_size, dim2_size, dim3_size, dim4_size);
-    // FIXIT: filter descriptor should be invoked only for filters
-    set4DFilterDescriptor(tensor, data_format, dim1_size, dim2_size, dim3_size, dim4_size);
-  
-    return tensor;
-  }
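-
-  // Illustrative use (host_buffer is hypothetical; float_type = 0, nchw = 0):
-  //   void* t = create4DTensor(0, 0, 1, 3, 32, 32); // [N, C, H, W]
-  //   initTensorData(t, host_buffer, 1 * 3 * 32 * 32 * sizeof(float));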
-
-
-  void initTensorData(void* tensor_ptr, void* data_ptr, size_t size_in_bytes){
-
-    Tensor* tensor = (Tensor*) tensor_ptr;
-
-    size_t host_size_in_bytes = tensor->num_elems * 4;
-    //if(tensor->size_in_bytes != size_in_bytes){
-    if(host_size_in_bytes != size_in_bytes){
-      ERROR("The destination and source sizes don't match");
-    }
-  
-    std::memcpy(tensor->host_data, data_ptr, size_in_bytes);
-
-    changeTensorPlacement(tensor, HOST);
-
-    tensor->cur_type = float_type;
-  }
-
-		      
-
-  void hostToDeviceCopy(struct Tensor* tensor){
-
-    if(tensor->data_placement != DEVICE){
-      cudaMemcpy(tensor->gpu_data, tensor->host_data, tensor->size_in_bytes,
-		 cudaMemcpyHostToDevice);
-      INFO("Moving %lu bytes from host to GPU \n", tensor->size_in_bytes);
-      tensor->data_placement = DEVICE;
-    }
-    else{
-      DEBUG("No data movement required - Data on Device \n");    
-    }
-  
-  }
-
-
-  void deviceToHostCopy(struct Tensor* tensor){
-
-    if(tensor->data_placement != HOST){
-      cudaMemcpy(tensor->host_data, tensor->gpu_data, tensor->size_in_bytes,
-		 cudaMemcpyDeviceToHost);  
-      INFO("Moving %lu bytes from GPU to host \n", tensor->size_in_bytes);
-      tensor->data_placement = HOST;
-    }
-    else{
-      DEBUG("No data movement required - Data on Host \n");    
-    }
-    
-  }
-
-
-  //void tensorCopy(struct Tensor* srcTensor, struct Tensor* dstTensor){
-
-  void tensorCopy(void* srcTensor_ptr, void* dstTensor_ptr){
-
-    struct Tensor* srcTensor = (struct Tensor*) srcTensor_ptr;
-    struct Tensor* dstTensor = (struct Tensor*) dstTensor_ptr;
-
-    
-    if(srcTensor->data_placement == HOST){
-      memcpy(dstTensor->host_data, srcTensor->host_data, srcTensor->size_in_bytes);  
-      INFO("Moving %lu bytes from host to host \n", srcTensor->size_in_bytes);
-      dstTensor->data_placement = HOST;
-    }
-    else if (srcTensor->data_placement == DEVICE){
-      cudaMemcpy(dstTensor->gpu_data, srcTensor->gpu_data, srcTensor->size_in_bytes,
-		 cudaMemcpyDeviceToDevice);
-      INFO("Moving %lu bytes from GPU to GPU \n", srcTensor->size_in_bytes);
-      dstTensor->data_placement = DEVICE;
-    }
-    
-  }
-
-
-  void hpvm_request_tensor(void* tensor_ptr, int destination){
-
-    Tensor* tensor = (Tensor*) tensor_ptr;
-    // If destination is the host
-    if(destination == 0){  
-      if(tensor->data_placement != HOST){
-	cudaMemcpy(tensor->host_data, tensor->gpu_data, tensor->size_in_bytes,
-		   cudaMemcpyDeviceToHost);  
-	DEBUG("Moving %lu bytes from GPU to host \n", tensor->size_in_bytes);
-	tensor->data_placement = HOST;
-      }
-      else{
-	DEBUG("No data movement required - Data on Host \n");    
-      }
-    }
-    // If destination is the GPU
-    else if(destination == 1){
-
-      if(tensor->data_placement != DEVICE){
-	cudaMemcpy(tensor->gpu_data, tensor->host_data, tensor->size_in_bytes,
-		   cudaMemcpyHostToDevice);
-	INFO("Moving %lu bytes from host to GPU \n", tensor->size_in_bytes);
-	tensor->data_placement = DEVICE;
-      }
-      else{
-	DEBUG("No data movement required - Data on Device \n");    
-      }    
-    }
-  
-  }
-  
-}
-
-
-
-
-bool ONLINE_PROFILING = false; // true;
-
-
-void convertToFP16(struct Tensor* tensor){
-
-  if(tensor == NULL)
-    return;
-  
-  printf("**** cur_type = %d , half_type = %d \n", tensor->cur_type, half_type);
-
-  if (ONLINE_PROFILING){
-    if (tensor->cur_type == half_type)
-      return;
-  }
-    
-  DEBUG("ConvertToFP16 \n");
-
-  setSizeInBytes(tensor, half_type, tensor->num_elems);
-  size_t size_in_bytes = tensor->size_in_bytes;
-  DEBUG("size_in_bytes = %lu \n", size_in_bytes);
-  
-  if(tensor->gpu_half_data == NULL)
-     checkCudaErrors(cudaMalloc(&tensor->gpu_half_data, size_in_bytes)); // Allocate memory on GPU
-  
-
-  // If Tensor is one of Tracked (has to free per batch) then track all data types
-  if(tracked_tensors.find(tensor) != tracked_tensors.end())
-    tensors_ptr.push_back(tensor->gpu_half_data);
-  
-  f2h((float*) tensor->gpu_data, tensor->num_elems, (half*) tensor->gpu_half_data);
-
-  tensor->cur_type = half_type;  
-}
-
-
-
-void convertToFP32(struct Tensor* tensor){
-
-  if(tensor == NULL)
-    return;
-  
-  // Need this check for both offline and online profiling path
-  if (tensor->cur_type == float_type)
-    return;
-    
-  DEBUG("ConvertoFP32 \n");
-  
-  setSizeInBytes(tensor, float_type, tensor->num_elems);
-  size_t size_in_bytes = tensor->size_in_bytes;
-  
-  // If FP32 data array doesn't exist, allocate
-  if(tensor->gpu_data == NULL){
-    checkCudaErrors(cudaMalloc(&tensor->gpu_data, size_in_bytes)); // Allocate memory on GPU
-    DEBUG("NOTE: Allocating new FP32 Array with size = %lu \n", size_in_bytes);
-  }
-
-
-  // If Tensor is one of Tracked (has to free per batch) then track all data types
-  if(tracked_tensors.find(tensor) != tracked_tensors.end())
-    tensors_ptr.push_back(tensor->gpu_data);
-
-  h2f((half*) tensor->gpu_half_data, tensor->num_elems, (float*) tensor->gpu_data);
-
-  tensor->cur_type = float_type;
-
-}
-
-
-
-void convertToFP32_offline(struct Tensor* tensor){
-
-  if(tensor == NULL)
-    return;
-  
-  if(ONLINE_PROFILING){
-    return;
-  }
-  
-  DEBUG("ConvertoFP32 \n");
-
-  setSizeInBytes(tensor, float_type, tensor->num_elems);
-  size_t size_in_bytes = tensor->size_in_bytes;
-
-  // If FP32 data array doesn't exist, allocate
-  if(tensor->gpu_data == NULL){
-    checkCudaErrors(cudaMalloc(&tensor->gpu_data, size_in_bytes)); // Allocate memory on GPU
-    DEBUG("NOTE: Allocating new FP32 Array with size = %lu \n", size_in_bytes);
-  }
-
-  // If Tensor is one of Tracked (has to free per batch) then track all data types
-  if(tracked_tensors.find(tensor) != tracked_tensors.end())
-    tensors_ptr.push_back(tensor->gpu_data);
-
-  h2f((half*) tensor->gpu_half_data, tensor->num_elems, (float*) tensor->gpu_data);
-
-  tensor->cur_type = float_type;
-
-  //freeCudaPtr(tensor->gpu_half_data);
-  //cudaFree(tensor->gpu_half_data);
-  //tensor->gpu_half_data = NULL;
-}
-
-
-
-
-
-// Called from within the runtime to change the data placement
-// This routine is required to change the output data placements from host to device
-void changeTensorPlacement(struct Tensor* tensor, data_location_t data_placement){
-
-  if(tensor == NULL)
-    ERROR("Tensor == NULL");
-  tensor->data_placement = data_placement;
-}
-
-
-#endif
-
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/testing_api.h b/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/testing_api.h
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/include/testing_api.h
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/img_tensor_runtime.cu b/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/img_tensor_runtime.cu
deleted file mode 100644
index 0460e490fd2b188b85f53cf9b109f09ac3d6b83a..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/img_tensor_runtime.cu
+++ /dev/null
@@ -1,137 +0,0 @@
-#include "../include/debug.h"
-#include "../include/img_tensor_runtime.h"
-#include "../include/approxhpvm_img_runtime_utils.h" 
-
-// ***                       Runtime implementation                      *** //
-// NOTE: These are unimplemented stubs. Each returns NULL explicitly, since
-// falling off the end of a value-returning function is undefined behavior.
-void* tensorFft(void* input) {
-  return NULL;
-}
-
-void* tensorReduce(void* input) {
-  return NULL;
-}
-
-void* tensorReductionSamplingReduce(void* input) {
-  return NULL;
-}
-
-void* tensorProjectiveT(void* input) {
-  return NULL;
-}
-
-void* tensorMap1(void* input) {
-  return NULL;
-}
-
-void* tensorMap2(void* input) {
-  return NULL;
-}
-
-void* tensorMap3(void* input) {
-  return NULL;
-}
-
-
-// ***                     Wrapper API implementation                    *** //
-
-void* wrapper_tensorFft(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for an FFT operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::FFT &&
-         "Invalid configuration generated for tensor fft wrapper operation");
-  return handleTensorFftApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
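-
-// Explanatory note: every wrapper_tensor* function below follows the same
-// pattern as wrapper_tensorFft above - look up this node's
-// GPUNodeConfiguration, assert that it holds exactly one approximation-choice
-// tuple of the expected TENSOR_OP kind, then dispatch to the matching
-// handleTensor*ApproximationTuples helper.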
-
-void* wrapper_tensorReduce(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for a reduce operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::REDUCE &&
-         "Invalid configuration generated for tensor reduce wrapper operation");
-  return handleTensorReduceApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
-
-void* wrapper_tensorProjectiveT(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for a projectiveT operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::PROJECTIVE_T &&
-         "Invalid configuration generated for tensor projectiveT wrapper operation");
-  return handleTensorProjectiveTApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
-
-void* wrapper_tensorMap1(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for a map1 operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::MAP1 &&
-         "Invalid configuration generated for tensor map1 wrapper operation");
-  return handleTensorMap1ApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
-
-void* wrapper_tensorMap2(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for a map2 operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::MAP2 &&
-         "Invalid configuration generated for tensor map2 wrapper operation");
-  return handleTensorMap2ApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
-
-void* wrapper_tensorMap3(const char* hpvm_node_id, void* input) {
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-  // Approximation choices must be for a map3 operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::MAP3 &&
-         "Invalid configuration generated for tensor map3 wrapper operation");
-  return handleTensorMap3ApproximationTuples(ApproxChoices[0].second,
-                                            input);
-}
-
-// Tentative
-void* wrapper_tensorStencil(const char* hpvm_node_id, void* input) {
-  ERROR("Stencil operation currently unsupported.\n");
-  abort();
-}
-
-void* wrapper_tensorCosineT(const char* hpvm_node_id, void* input) {
-  ERROR("CosineT operation currently unsupported.\n");
-  abort();
-}
-
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_cpu_runtime.cc b/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_cpu_runtime.cc
deleted file mode 100644
index 7397e78013c1e0284314ba8e47012c435345da59..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_cpu_runtime.cc
+++ /dev/null
@@ -1,465 +0,0 @@
-/* This file includes the CPU implementation of the HPVM tensor runtime API
- ** (pure host code - no cuBLAS/cuDNN dependency)
- **
- **  Author: Hashim Sharif
- **  Email: hsharif3@illinois.edu
- */
-
-#include <algorithm>
-#include <cfloat>
-#include <cmath>
-#include <cstdio>
-#include <cstdlib>
-#include <cstring>
-#include <ctime>
-#include <iostream>
-#include <limits>
-#include <map>
-#include <memory>
-#include <sstream>
-#include <stdarg.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string>
-#include <vector>
-
-// Tensor runtime header files
-#include "../include/tensor_cpu.h"
-#include "../include/tensor_cpu_runtime.h"
-
-
-extern "C"{
-
-  void llvm_hpvm_initTensorRt(int gpuid) {
-    // NOTE: Do Nothing
-  }
-
-  void llvm_hpvm_cleanupTensorRt() {
-    // NOTE: Do Nothing
-  }
-
-  void hpvm_request_tensor(void *tensor, int destination) {
-    // NOTE: Do Nothing
-  }
-
-  // Returns the size (in bytes) of the given data type
-  int getTypeSize(int data_type) __attribute__((always_inline));
-  inline int getTypeSize(int data_type) {
-    // Float/Int data type - Full Precision
-    if (data_type == 0)
-      return 4;
-    // Half data type
-    if (data_type == 1)
-      return 2;
-
-    return 1;
-  }
-
-  void setSizeInBytes(struct Tensor *tensor, int data_type, size_t num_elems) __attribute__((always_inline));
-  inline void setSizeInBytes(struct Tensor *tensor, int data_type, size_t num_elems) {
-    int type_size = getTypeSize(data_type);
-    size_t size_in_bytes = type_size * num_elems;
-    tensor->size_in_bytes = size_in_bytes;
-  }
-
-  void allocateMemCPU(struct Tensor *tensor, int data_type, size_t num_elems) __attribute__((always_inline)); 
-  inline void allocateMemCPU(struct Tensor *tensor, int data_type, size_t num_elems) {
-    setSizeInBytes(tensor, data_type, num_elems);
-    tensor->data_type = data_type;
-    tensor->num_elems = num_elems;
-    tensor->host_data =
-      (void *)malloc(tensor->size_in_bytes); // Allocate memory on the host
-  }
-
-  void initTensorData(void *tensor_ptr, void *data_ptr, size_t size_in_bytes) {
-
-    Tensor *tensor = (Tensor *)tensor_ptr;
-    if (tensor->size_in_bytes != size_in_bytes) {
-      printf("The destination and source sizes don't match");
-    }
-    memcpy(tensor->host_data, data_ptr, size_in_bytes);
-  }
-
-  
-  void *create4DTensorInternal(int data_type, int data_format, size_t dim1_size,
-			  size_t dim2_size, size_t dim3_size, size_t dim4_size) __attribute__((always_inline));
-  inline void *create4DTensorInternal(int data_type, int data_format, size_t dim1_size,
-			  size_t dim2_size, size_t dim3_size, size_t dim4_size) {
-
-    struct Tensor *tensor = (struct Tensor *)malloc(sizeof(Tensor));
-    size_t num_elems = dim1_size * dim2_size * dim3_size * dim4_size;
-
-    allocateMemCPU(tensor, data_type, num_elems);
-    // Setting the tensor dimensions
-    size_t *dim_sizes = (size_t *)malloc(sizeof(size_t) * 4);
-    dim_sizes[0] = dim1_size;
-    dim_sizes[1] = dim2_size;
-    dim_sizes[2] = dim3_size;
-    dim_sizes[3] = dim4_size;
-    tensor->dims.dim_sizes = dim_sizes;
-    tensor->dims.num_dims = 4;
-
-    return tensor;
-  }
-
-  void* create4DTensor(int data_type, int data_format, size_t dim1_size,
-		       size_t dim2_size, size_t dim3_size, size_t dim4_size) {
-
-    return create4DTensorInternal(data_type, data_format, dim1_size, dim2_size, dim3_size, dim4_size);
-  }
-
-
-  void* __attribute__((always_inline)) tensorAddCPU(void *x_ptr, void *bias_ptr) {
-
-    Tensor *x = (Tensor *)x_ptr;
-    Tensor *bias = (Tensor *)bias_ptr;
-
-    float *x_data = (float *)x->host_data;
-    float *bias_data = (float *)bias->host_data;
-
-    int n = x->dims.dim_sizes[0];
-    int c = x->dims.dim_sizes[1];
-    int h = x->dims.dim_sizes[2];
-    int w = x->dims.dim_sizes[3];
-
-    size_t num_elems = x->num_elems;
-    size_t num_elems2 = bias->num_elems;
-
-    if (num_elems == num_elems2) {
-      for (size_t i = 0; i < num_elems; i++) {
-	x_data[i] += bias_data[i];
-      }
-    } else {
-
-      for (int i = 0; i < n; i++) {
-	for (int j = 0; j < c; j++) {
-	  for (int k = 0; k < h; k++) {
-	    for (int l = 0; l < w; l++) {
-	      x_data[i * (c * h * w) + j * (h * w) + k * w + l] += bias_data[j];
-	    }
-	  }
-	}
-      }
-    }
-
-    return x;
-  }
-
-  void *tensorGemmCPU(void *lhs_ptr, void *rhs_ptr) {
-
-    Tensor *lhs = (Tensor *)lhs_ptr;
-    Tensor *rhs = (Tensor *)rhs_ptr;
-
-    // 'm' holds the batch dimension - assuming NCHW format Tensors
-    int m = lhs->dims.dim_sizes[0];
-    // The rhs must be a 2D tensor
-    int n = rhs->dims.dim_sizes[rhs->dims.num_dims - 1]; // output neurons
-    int k = 1;
-    // Flattening the dimensions after the batch dimension
-    // NOTE: Allowing any number of dimensions > 2 for lhs
-    for (int j = 1; j < lhs->dims.num_dims; j++) {
-      k = k * lhs->dims.dim_sizes[j]; // input neurons
-    }
-
-    int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims - 2];
-    if (rhs_k != k)
-      printf("ERROR: rhs (%d) and lhs (%d) inner dimensions don't match \n", rhs_k, k);
-
-    // NOTE: Creating a 4D tensor to be compatible with later called cuDNN
-    // routines
-    Tensor *output = (Tensor *)create4DTensorInternal(0, 0, m, n, 1, 1);
-
-    float *lhs_arr = (float *)lhs->host_data;
-    float *rhs_arr = (float *)rhs->host_data;
-    float *output_arr = (float *)output->host_data;
-
-    for (int i = 0; i < m; i++) {
-      for (int j = 0; j < n; j++) {
-	float sum = 0.0;
-	for (int l = 0; l < k; l++) {
-	  float mul = lhs_arr[i * k + l] * rhs_arr[l * n + j];
-	  sum = sum + mul;
-	}
-	output_arr[i * n + j] = sum;
-      }
-    }
-
-    return output;
-  }
-
-  float power(float num, int exp) __attribute__((always_inline));
-  inline float power(float num, int exp){
-    bool neg = false; 
-    if (exp < 0) {
-      neg = true;
-      exp = -1 * exp;
-    }
-
-    float pow = 1;
-    for (int i = 0; i < exp; i++) {
-      pow = pow * num;
-    }
-  
-    if(neg)
-      return 1 / pow;
-    else
-      return pow;
-  }
-
-  float epow(float x) __attribute__((always_inline));
-  inline float epow(float x){
-
-    bool neg = false;
-    if (x < 0) {
-      x = -1 * x;
-      neg = true;
-    }
-
-    float sum = 0.0;
-    float fac = 1;
-
-    // Whole-number part: multiply by e once per integral unit of x,
-    // leaving only the fractional part in x
-    float pow = 1;
-    for (int i = x; i > 0; i--, x--) {
-      pow = pow * 2.71828;
-    }
-  
-    // Fractional part: first 15 terms of the Taylor series for e^x
-    for (int i = 0; i < 15; i++) {
-      sum = sum + power(x, i) / fac;
-      fac = fac * (i + 1);
-    }
-
-    if(neg)
-      return 1 / (sum * pow);
-    else
-      return sum * pow;
-  }
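-
-  // Worked example (for clarity): epow(2.5f) first accumulates the whole part,
-  // pow = 2.71828 * 2.71828 ~= 7.389, leaving x = 0.5. The 15-term Taylor sum
-  // then gives 1 + 0.5 + 0.5^2/2! + ... ~= 1.6487 ~= e^0.5, so the result is
-  // pow * sum ~= 7.389 * 1.6487 ~= 12.18 ~= e^2.5.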
-
-  float custom_tanh(float num) __attribute__((always_inline));
-  inline float custom_tanh(float num){
-    float value = epow(2 * num);
-    value = (value - 1) / (value + 1);
-    return value;
-  }
-
-  float max(float v1, float v2) __attribute__((always_inline));
-  inline float max(float v1, float v2){
-    if (v1 < v2)
-      return v2;
-    else
-      return v1;
-  }
-
-  void *tensorReluCPU(void *input_ptr) {
-    Tensor *input = (Tensor *)input_ptr;
-
-    float *input_data = (float *)input->host_data;
-    size_t num_elems = input->num_elems;
-    for (size_t i = 0; i < num_elems; i++) {
-      if (input_data[i] < 0) {
-	input_data[i] = 0;
-      }
-    }
-
-    return input;
-  }
-
-  void *tensorTanhCPU(void *input_ptr) {
-    Tensor *input = (Tensor *)input_ptr;
-
-    float *input_data = (float *)input->host_data;
-    size_t num_elems = input->num_elems;
-    for (size_t i = 0; i < num_elems; i++) {
-      input_data[i] = custom_tanh(input_data[i]);
-    }
-
-    return input;
-  }
-
-  void *tensorRelu2CPU(void *input_ptr, float min, float max) {
-    Tensor *input = (Tensor *)input_ptr;
-
-    float *input_data = (float *)input->host_data;
-    size_t num_elems = input->num_elems;
-    for (size_t i = 0; i < num_elems; i++) {
-      if (input_data[i] < min) {
-	input_data[i] = min;
-      }
-      if (input_data[i] > max) {
-	input_data[i] = max;
-      }
-    }
-
-    return input;
-  }
-
-  void *tensorPoolingCPU(void *input_ptr, int poolFunction, int window_height,
-			 int window_width, int vertical_pad, int horizontal_pad,
-			 int vertical_stride, int horizontal_stride) {
- 
-    Tensor *input = (Tensor *)input_ptr;
-    float *input_data = (float *)input->host_data;
-
-    int batch_size = input->dims.dim_sizes[0];
-    int channels = input->dims.dim_sizes[1];
-    int image_height = input->dims.dim_sizes[2];
-    int image_width = input->dims.dim_sizes[3];
-
-    int output_height =
-      1 + ((image_height - window_height + 2 * vertical_pad) / vertical_stride);
-    int output_width = 1 + ((image_width - window_width + 2 * horizontal_pad) /
-			    horizontal_stride);
-
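-    // Worked example (illustrative): a 32x32 input with a 2x2 window, no
-    // padding, and stride 2 gives 1 + (32 - 2 + 0) / 2 = 16, i.e. a 16x16 output.
-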
-    int center_x = (window_width - 1) / 2 - horizontal_pad;
-    int center_y = (window_height - 1) / 2 - vertical_pad;
-    int x_radius = (window_width - 1) / 2;
-    int y_radius = (window_height - 1) / 2;
-
-    Tensor *output = (Tensor *) create4DTensorInternal(0, 0, batch_size, channels,
-						       output_height, output_width);
-    float *output_data = (float *)output->host_data;
-
-    for (int b = 0; b < batch_size; b++) {
-      for (int ch = 0; ch < channels; ch++) {
-	int ii = 0, jj = 0;
-	for (int r = center_y; r < image_height + vertical_pad - y_radius;
-	     r += vertical_stride) {
-	  for (int c = center_x; c < image_width + horizontal_pad - x_radius;
-	       c += horizontal_stride) {
-	    float val;
-	    if (poolFunction == 0)  // 0 = max pooling
-	      val = -FLT_MAX;
-	    else                    // 1 = average pooling
-	      val = 0;
-
-	    for (int i = r - y_radius, ki = 0; ki < window_height; i++, ki++) {
-	      for (int j = c - x_radius, kj = 0; kj < window_width; j++, kj++) {
-		if (i >= 0 && j >= 0 && i < image_height && j < image_width) {
-		  if (poolFunction == 0)
-		    val = max(
-			      val,
-			      input_data[b * (channels * image_height * image_width) +
-					 ch * (image_height * image_width) +
-					 i * image_width + j]);
-		  else
-		    val +=
-                      input_data[b * (channels * image_height * image_width) +
-                                 ch * (image_height * image_width) +
-                                 i * image_width + j];
-		}
-	      }
-	    }
-	    if (poolFunction == 1)
-	      val /= window_height * window_width;
-
-	    output_data[b * (channels * output_height * output_width) +
-			ch * (output_height * output_width) + ii * output_width +
-			jj] = val;
-	    jj++;
-	    if (jj == output_width) {
-	      jj = 0;
-	      ii++;
-	    }
-	  }
-	}
-      }
-    }
-
-    return output;
-  }
-
-  void *tensorSoftmaxCPU(void *input_ptr) {
-    Tensor *input = (Tensor *)input_ptr;
-
-    float *logits = (float *)input->host_data;
-
-    int n = input->dims.dim_sizes[0];
-    int c = input->dims.dim_sizes[1];
-
-    for (int i = 0; i < n; i++) {
-      float x = 0;
-      for (int j = 0; j < c; j++)
-	x += epow(logits[i * c + j]);
-
-      for (int j = 0; j < c; j++)
-	logits[i * c + j] = epow(logits[i * c + j]) / x;
-    }
-
-    return input;
-  }
-
-  void* __attribute__((always_inline)) tensorConvolutionCPU(void *input_ptr, void *filter_ptr, int vertical_pad,
-			     int horizontal_pad, int vertical_stride,
-			     int horizontal_stride, int conv_mode,
-			     int compute_precision) {
- 
-    Tensor *input = (Tensor *)input_ptr;
-    Tensor *filter = (Tensor *)filter_ptr;
-
-    float *image = (float *)input->host_data;
-    float *kernels = (float *)filter->host_data;
-
-    int batch_size = input->dims.dim_sizes[0];
-    int channels = input->dims.dim_sizes[1];
-    int image_height = input->dims.dim_sizes[2];
-    int image_width = input->dims.dim_sizes[3];
-    int num_filters = filter->dims.dim_sizes[0];
-    int kernel_height = filter->dims.dim_sizes[2];
-    int kernel_width = filter->dims.dim_sizes[3];
-
-    // kernel centers
-    int center_x = (kernel_width - 1) / 2 - horizontal_pad;
-    int center_y = (kernel_height - 1) / 2 - vertical_pad;
-
-    int x_radius = (kernel_width - 1) / 2;
-    int y_radius = (kernel_height - 1) / 2;
-    int output_height =
-      1 + ((image_height - kernel_height + 2 * vertical_pad) / vertical_stride);
-    int output_width = 1 + ((image_width - kernel_width + 2 * horizontal_pad) /
-			    horizontal_stride);
-
-    Tensor *output = (Tensor *) create4DTensorInternal(0, 0, batch_size, num_filters,
-						 output_height, output_width);
-    float *output_data = (float *)output->host_data;
-
-    for (int b = 0; b < batch_size; b++) {
-      for (int f = 0; f < num_filters; f++) {
-	int ii = 0, jj = 0;
-	for (int r = center_y; r < image_height + vertical_pad - y_radius;
-	     r += vertical_stride) {
-	  for (int c = center_x; c < image_width + horizontal_pad - x_radius;
-	       c += horizontal_stride) {
-
-	    float sum = 0;
-	    for (int ch = 0; ch < channels; ch++) {
-	      for (int i = r - y_radius, ki = 0; ki < kernel_height; i++, ki++) {
-		for (int j = c - x_radius, kj = 0; kj < kernel_width; j++, kj++) {
-		  if (i >= 0 && j >= 0 && i < image_height && j < image_width) {
-		    sum += image[b * (channels * image_height * image_width) +
-				 ch * (image_height * image_width) +
-				 i * image_width + j] *
-		      kernels[f * (channels * kernel_height * kernel_width) +
-			      ch * (kernel_height * kernel_width) +
-			      ki * kernel_width + kj];
-		  }
-		}
-	      }
-	    }
-	    output_data[b * (num_filters * output_height * output_width) +
-			f * (output_height * output_width) + ii * output_width +
-			jj] = sum;
-	    jj++;
-	    if (jj == output_width) {
-	      jj = 0;
-	      ii++;
-	    }
-	  }
-	}
-      }
-    }
-
-    return output;
-  }
-
-
-}
diff --git a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_runtime.cu b/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_runtime.cu
deleted file mode 100644
index 5f17cefeed77f15cd0af13ac8e0ae0d3474b6873..0000000000000000000000000000000000000000
--- a/llvm/projects/hpvm-tensor-rt/tensor_runtime/src/tensor_runtime.cu
+++ /dev/null
@@ -1,2122 +0,0 @@
-/* This file includes the API implementation of the HPVM tensor runtime built on cublas, cudnn
-**
-**  Author: Hashim Sharif
-**  Email: hsharif3@illinois.edu
-*/
-
-#include <stdio.h>
-#include <stdarg.h>
-#include <cstdio>
-#include <cstdlib>
-#include <cmath>
-#include <ctime>
-#include <cfloat>
-#include <algorithm>
-#include <chrono>
-#include <iomanip>
-#include <iostream>
-#include <map>
-#include <memory>
-#include <random>
-#include <sstream>
-#include <string>
-#include <vector>
-
-#include <cuda_runtime.h>
-#include <device_launch_parameters.h>
-
-#include <cublas_v2.h>
-#include <cudnn.h>
-#include <cublas_api.h>
-#include <cuda_fp16.h>
-#include <driver_types.h>
-
-
-// Tensor runtime header files
-#include "../include/tensor_runtime.h"
-#include "../include/tensor_utils.cu"
-#include "../include/debug.h"
-#include "../include/profiling.h"
-#include "../include/fp16_conversion.h"
-#include "../include/global_data.h"
-#include "../include/error.h"
-#include "../include/tensor.h"
-#include "../include/op_overheads.h"
-#include "../include/half_precision_api.h"
-#include "../include/approx_techniques.h"
-#include "../include/hpvm-rt-controller.h"
-#include "../include/approxhpvm_runtime_utils.h" 
-
-#include "../include/approx_simulation.h"
-
-// Image tensor runtime implementation
-#include "img_tensor_runtime.cu"
-
-//** Potential Improvements:
-//   1) Add support for data types beyond float and half
-//   2) Support for more CUDNN operations
-
-
-
-void llvm_hpvm_initTensorRt(int gpuid){
-
-  if(!runtime_initialized){
-    
-    printf("INITIALIZING GPU %d \n", gpuid);
-    // NOTE: Setting the target GPU. Can we use multiple GPUs?
-    checkCudaErrors(cudaSetDevice(gpuid));
-    // Initializing cuDNN and cuBlas handles
-    checkCudaErrors(cublasCreate(&cublasHandle));
-    checkCUDNN(cudnnCreate(&cudnnHandle));
-
-
-#ifdef PROMISE_TUNER_ENABLED
-    //    readOpenTunerFlags("opentuner_flags");
-    readOpenTunerFlags("promise_flags");
-#endif
-
-
-#ifdef ERROR_INJECTION_ENABLED
-    readOpenTunerFlags("opentuner_flags");
-#endif
-
-    
-    runtime_initialized = true;
-  }
-  
-}
-
-
-void llvm_hpvm_cleanupTensorRt(){
-  DEBUG("\**** llvm_hpvm_cleanupTensorRt ***\n");
-  dumpAccuracyNorms();
-}
-
-
-void llvm_hpvm_initApproxhpvmRt(int gpuid){
-  llvm_hpvm_initTensorRt(gpuid);
-  approxhpvm_runtime_mode = true;
-}
-
-void llvm_hpvm_cleanupApproxhpvmRt(){
-
-}
-
-
-
-void dumpAccuracyNorms(){
-
-  dump_result("accuracy_summary");
-}
-
-
-// Returns the number of GPUs active on the platform
-int getGPUCount(){
-  int num_gpus;
-  checkCudaErrors(cudaGetDeviceCount(&num_gpus));
-  return num_gpus;
-}
-
-
-
-void clearTensorMap(){
-
-  tensors_ptr.clear();
-  host_ptr.clear();
-  obj_ptr.clear();
-}
-
-
-void startMemTracking(){
-
-  tensors_ptr.clear();
-  host_ptr.clear();
-  obj_ptr.clear();
-
-  tracked_tensors.clear();
-}
-
-
-void freeOutputTensors(){
-
-  DEBUG("**** Freeing Output Tensors *** \n");
-  for(size_t i = 0; i < tensors_ptr.size(); i++){
-    cudaFree(tensors_ptr[i]);
-    tensors_ptr[i] = NULL;
-  }
-
-  for(size_t i = 0; i < host_ptr.size(); i++){
-    free(host_ptr[i]);
-    host_ptr[i] = NULL;
-  }
-
-  for(size_t i = 0; i < obj_ptr.size(); i++){
-    free(obj_ptr[i]);
-    obj_ptr[i] = NULL;
-  }
-}
-
-
-
-void clearOpCounter(){
-  total_ops = 0;
-  op_counter = 0;
-  op_accuracies.clear();
-}
-
-
-
-void freeBatchMemory(){
-  // Free allocated memory for the current mini-batch
-  freeOutputTensors();
-  // Reinitialize the counter for OpenTuner flags for the next mini-batch of execution
-  op_counter = 0;
-  // Clearing profiling data map
-  func_counters.clear();
-}
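-
-// Usage sketch (illustrative only) of the per-batch memory protocol: a
-// generated inference loop is expected to bracket each mini-batch as
-//
-//   startMemTracking();                  // begin tracking this batch's outputs
-//   void* out = tensorConvolution(...);  // intermediate tensors get tracked
-//   /* ... rest of the batch ... */
-//   freeBatchMemory();                   // free tracked outputs, reset op_counter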
-
-
-
-
-// FIXIT: Fix any assumptions on the NCHW format
-// TODO: benchmark split performance to check whether its overhead is prohibitive
-void** tensorSplit(void* tensor_ptr, int num_splits, int split_dim){
-
-  INFO("*** TensorSplit \n");  
-  profileEvent("tensorSplit");
-
-  Tensor* tensor = (Tensor*) tensor_ptr;
-  
-  deviceToHostCopy(tensor); // Splitting done on the host
-
-  Tensor** splits = (Tensor**) malloc(sizeof(Tensor*) * num_splits);
-  size_t* dim_sizes = (size_t*) malloc(sizeof(size_t) * tensor->dims.num_dims);
-  for(unsigned int i = 0; i < tensor->dims.num_dims; i++){
-    dim_sizes[i] = tensor->dims.dim_sizes[i];
-  }
-
-  
-  dim_sizes[split_dim] = tensor->dims.dim_sizes[split_dim] / num_splits;
-  if(dim_sizes[split_dim] < 1)
-    ERROR("Split Dimension < 1 after splitting");
-
-  size_t copy_size = getTypeSize(tensor->data_type);
-  for(unsigned int i = split_dim; i < tensor->dims.num_dims; i++){
-    copy_size = copy_size * dim_sizes[i];
-  }
-  
-  for(unsigned int i = 0; i < num_splits; i++){
-    // FIXIT: Don't be specific to 4D tensors
-    // NOTE: Using same data format (NHWC/NCHW) for the split tensors
-    INFO("dim_sizes[0] = %d, dim_sizes[1] = %d, dim_sizes[2] = %d, dim_sizes[3] = %d \n",
-	 dim_sizes[0], dim_sizes[1], dim_sizes[2], dim_sizes[3]);
-
-    Tensor* split = (Tensor*) create4DTensor(tensor->data_type, tensor->data_format,
-					  dim_sizes[0], dim_sizes[1], dim_sizes[2], dim_sizes[3]);
-    
-    size_t copy_start = i * copy_size;
-    size_t copy_stride = num_splits * copy_size;
-    INFO("copy_size = %d, copy_start = %d, copy_stride = %d, tensor->size_in_bytes = %d \n",
-	 copy_size, copy_start, copy_stride, tensor->size_in_bytes);
-
-    int index = 0;
-    while(copy_start + copy_size <= tensor->size_in_bytes){
-      memcpy(((char*) split->host_data + (index * copy_size)),
-	     ((char*)tensor->host_data + copy_start),
-	     copy_size);
-      copy_start += copy_stride;
-      index++;
-    }
-   	
-    splits[i] = split;     
-  }
-
-  profileEvent("tensorSplit_end", true);
-
-  return (void**) splits;
-}
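-
-// Worked example (illustrative): splitting an NCHW tensor of dims [8, 6, 4, 4]
-// into num_splits = 3 along split_dim = 1 yields three [8, 2, 4, 4] tensors.
-// With float data, copy_size = 4 * 2 * 4 * 4 = 128 bytes and copy_stride =
-// 3 * 128 = 384 bytes, so split i gathers the chunks at byte offsets
-// i*128, i*128 + 384, i*128 + 768, and so on.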
-
-
-void* tensorConcat(void** tensors_ptr, int num_splits, int split_dim){
-
-  INFO("*** TensorConcat \n");  
-  profileEvent("tensorConcat");
-
-  Tensor** tensors = (Tensor**) tensors_ptr;
-
-  for(int i = 0; i < num_splits; i++){
-    deviceToHostCopy(tensors[i]); // Concatenation done on the host
-  }
-  
-  // The number of dimensions of the concatenated tensor matches that of the input splits
-  size_t* dim_sizes = (size_t*) malloc(sizeof(size_t) * tensors[0]->dims.num_dims);
-  for(unsigned int i = 0; i < tensors[0]->dims.num_dims; i++){
-    dim_sizes[i] = tensors[0]->dims.dim_sizes[i];
-  }
-  
-  size_t copy_size = getTypeSize(tensors[0]->data_type);
-  for(unsigned int i = split_dim; i < tensors[0]->dims.num_dims; i++){
-    copy_size = copy_size * dim_sizes[i];
-  }
-
-  dim_sizes[split_dim] = dim_sizes[split_dim] * num_splits;
-  if(dim_sizes[split_dim] < 1)
-    ERROR("Split Dimension < 1 after concat");
-
-  Tensor* output = (Tensor*) create4DTensor(tensors[0]->data_type, tensors[0]->data_format,
-					 dim_sizes[0], dim_sizes[1], dim_sizes[2], dim_sizes[3]);
-
-  INFO("dim_sizes[0] = %d, dim_sizes[1] = %d, dim_sizes[2] = %d, dim_sizes[3] = %d \n",
-       dim_sizes[0], dim_sizes[1], dim_sizes[2], dim_sizes[3]);
-
-
-  int num_copies = 1;
-  for(unsigned int i = 0; i < split_dim; i++){
-    num_copies = num_copies * dim_sizes[i];
-  }
-  
-  size_t copy_stride = num_splits * copy_size;
-  INFO("copy_size = %d, num_copies = %d, copy_stride = %d, output->size_in_bytes = %d \n",
-       copy_size, num_copies, copy_stride, output->size_in_bytes);
-
-  for(unsigned int i = 0; i < num_copies; i++){
-    // FIXIT: Don't be specific to 4D tensors
-    size_t copy_start = i * copy_stride;
-   
-    for(int j = 0; j < num_splits; j++){
-      struct Tensor* split = tensors[j];
-      memcpy(((char*) output->host_data + copy_start + (j * copy_size)),
-	     ((char*) split->host_data + (i * copy_size)),
-	     copy_size);   
-    }      
-  }
-
-  profileEvent("tensorConcat_end", true);
-
-  return output;
-}
-
-
-
-void* tensorLRN(void* input_ptr, unsigned int LRN_window,
-		double LRN_alpha, double LRN_beta, double LRN_k){
-
-  INFO("*** TensorLRN \n");  
-  profileEvent("tensorLRN");
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  hostToDeviceCopy(input);
-
-  float alpha = 1.0f, beta = 0.0f;
-  cudnnLRNDescriptor_t LRNDesc;
-  checkCUDNN(cudnnCreateLRNDescriptor(&LRNDesc));
-
-  INFO("window = %d, LRN_alpha = %f, LRN_beta = %f, LRN_k = %f \n",
-       LRN_window, LRN_alpha, LRN_beta, LRN_k);
- 
-  
-  checkCUDNN(cudnnSetLRNDescriptor(LRNDesc, LRN_window, LRN_alpha, LRN_beta, LRN_k));
-
-  size_t* dim_sizes = input->dims.dim_sizes;
-  Tensor* output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, 
-			  CUDNN_TENSOR_NCHW, dim_sizes[0], dim_sizes[1],
-			  dim_sizes[2], dim_sizes[3]);
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE); 
-  // NOTE: Necessary to insert the above call for every output tensor
-
-  printTensorDescInfo(input);
-  printTensorDescInfo(output);
-  
-  checkCUDNN(cudnnLRNCrossChannelForward(cudnnHandle, LRNDesc, CUDNN_LRN_CROSS_CHANNEL_DIM1,
-					 &alpha, input->tensor_desc, input->gpu_data,
-					 &beta, output->tensor_desc, output->gpu_data));
-
-  profileEvent("tensorLRN_end", true);
-    
-  return output;
-}
-
-
-void printTensorDims2(void* tensor_ptr){
-
-  struct Tensor* tensor = (struct Tensor*) tensor_ptr;
-
-  printf("Num_elems = %lu \n", tensor->num_elems);
-  for (int i = 0; i < tensor->dims.num_dims; i++){
-    printf("dim[%d] = %lu \n", i, tensor->dims.dim_sizes[i]);
-  }
-}
-
-
-
-
-// FIXIT: tensorAdd currently only works for 4D tensors
-void* tensorAdd(void* x_ptr, void* bias_ptr){
-  
-  Tensor* x = (Tensor*) x_ptr;
-  Tensor* bias = (Tensor*) bias_ptr;
-  
-  INFO("*** TensorAdd \n");  
-  profileEvent("Add");
-    
-  float alpha = 1.0f;
-  hostToDeviceCopy(x);
-  hostToDeviceCopy(bias);
-
-  convertToFP32(x);
-  convertToFP32(bias);
-
-  
-  INFO("x->num_elems = %d \n", x->num_elems);
-  INFO("bias->num_elems = %d \n", bias->num_elems);
-
-  if(cudnnHandle == NULL){
-    ERROR("cudnnHandle NOT initialized!! \n");    
-  }
-  
-  // FIXIT: routine fails for 3D tensors
-  checkCUDNN(cudnnAddTensor(cudnnHandle, &alpha, bias->tensor_desc,
-			    bias->gpu_data, &alpha, x->tensor_desc, x->gpu_data));
-
-  profileEvent("Add_end", true);
-
-  #ifdef ERROR_INJECTION_ENABLED  
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-
-  // Forcing 0 error for (Resnet-like) equal dimension adds (Testing-only)
-  
-  //-- if (bias->dims.dim_sizes[0] > 1)
-  //--  op_acc = 0;
-  // Skip errorInjection if explicitly requested
-  //-- if (skip_tensors.find(op_counter) != skip_tensors.end()){
-  //--   op_acc = 0;  
-  //  }
-
-  void* error_norms = tensorAddError(x, op_acc);
-  add_norms(error_norms, "tensorAdd", op_acc);
-  add_bias_overheads(x, op_acc);
-  op_counter++;
-  
-  #endif
-  
-  
-  return x;
-}
-
-
-// FIXIT: Generalize all of the routines for types {half, float, double}
-void* tensorConvolution(void* input_ptr, void* filter_ptr,
-			int vertical_pad, int horizontal_pad,
-			int vertical_stride, int horizontal_stride,
-			int conv_mode, int conv_groups){  
-  
-  INFO("*** TensorConvolution \n");
-  profileEvent("Conv");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* filter = (Tensor*) filter_ptr;
-  
-  cudnnConvolutionDescriptor_t convDesc;
-  cudnnConvolutionFwdAlgo_t convAlgo;
-  cudnnConvolutionMode_t mode;
-  if(conv_mode == 0)
-    mode = CUDNN_CONVOLUTION;
-  else if(conv_mode == 1)
-    mode = CUDNN_CROSS_CORRELATION;
-
-  // NOTE: mode is currently forced to CUDNN_CROSS_CORRELATION, which makes the
-  // conv_mode selection above dead code (see the IMP-FIXIT below)
-  mode = CUDNN_CROSS_CORRELATION;
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  
-  // TODO: Support other cases;  
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(filter);
-
-  convertToFP32(input);
-  convertToFP32(filter);
-
-  
-  INFO("vertical_stride = %lu, horizontal_stride = %lu \n", vertical_stride, horizontal_stride);  
-
-  checkCUDNN(cudnnCreateConvolutionDescriptor(&convDesc));
-
-  // FIXME: Current hack to preserve backward compatibility
-  if(conv_groups == 0){
-    conv_groups = 1;
-  }
-  
-  
-  
-  cudnnDataType_t computeType = CUDNN_DATA_FLOAT;
-  // FIXIT: Decide whether the upscaling (dilation) values need to be configurable
-  // IMP-FIXIT: Either make mode configurable OR determine whether CUDNN_CONVOLUTION mode should be used
-  checkCUDNN(cudnnSetConvolution2dDescriptor(convDesc,
-					     vertical_pad, horizontal_pad, // conv padding
-					     vertical_stride, horizontal_stride, // conv strides
-					     1, 1, // upscaling values
-					     mode , // mode is configurable
-                                             computeType)); // defines compute precision
-
-  // NOTE: Adding support for grouped convolution
-  checkCUDNN(cudnnSetConvolutionGroupCount(convDesc, conv_groups));
-
-  int n, c, h, w; // output dimensions  
-  // Find dimension of convolution output
-
-  if(input->tensor_desc == NULL || filter->filter_desc == NULL)
-    ERROR("Input or Filter descriptor is NULL");
-    
-  checkCUDNN(cudnnGetConvolution2dForwardOutputDim(convDesc,
-						   input->tensor_desc,
-						   filter->filter_desc,
-						   &n, &c, &h, &w));
-
-    
-  DEBUG("**Output Tensor Dims, n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-
-  Tensor* output;
-  if(input->data_format == CUDNN_TENSOR_NCHW)
-    output = (Tensor*) create4DTensor((cudnnDataType_t) float_type,  
-			              CUDNN_TENSOR_NCHW, n, c, h, w);
-  else if(input->data_format == CUDNN_TENSOR_NHWC){
-    DEBUG("* NHWC Format \n");
-    output = (Tensor*) create4DTensor((cudnnDataType_t) float_type, 
-			              CUDNN_TENSOR_NHWC, n, h, w, c);
-  }
-  else
-    ERROR("Unsupported Tensor Type");
-
-  // NOTE: Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE); 
-  // NOTE: Necessary to insert the above call for every output tensor
-    
-  DEBUG("tensor->data_type = %d, tensor->data_format = %d, N = %d, C = %d, H = %d, W = %d \n",
-	output->data_type, output->data_format, output->dims.dim_sizes[0],
-	output->dims.dim_sizes[1],
-	output->dims.dim_sizes[2], output->dims.dim_sizes[3]);
-
-  if(convDesc == NULL || input->tensor_desc == NULL ||
-     filter->filter_desc == NULL || output->tensor_desc == NULL)
-    ERROR("NULL descriptor! \n");
-
-
-  // Debugging info prints
-  printTensorDescInfo(input);
-  printTensorDescInfo(filter);
-  printTensorDescInfo(output);
-
-  // NOTE-FIXIT: function failing for NHWC formats - perhaps some CUDNN support is lacking
-  checkCUDNN(cudnnGetConvolutionForwardAlgorithm(cudnnHandle,
-						 input->tensor_desc,
-						 filter->filter_desc,
-						 convDesc,
-						 output->tensor_desc,
-						 CUDNN_CONVOLUTION_FWD_PREFER_FASTEST,	 
-						 //CUDNN_CONVOLUTION_FWD_NO_WORKSPACE,
-						 0,
-						 &convAlgo));
-
-  
-  DEBUG("ConvAlgo = %d, FFT = %d, GEMM = %d, WINOGRAD = %d \n", convAlgo,
-	 CUDNN_CONVOLUTION_FWD_ALGO_FFT, CUDNN_CONVOLUTION_FWD_ALGO_GEMM,
-	 CUDNN_CONVOLUTION_FWD_ALGO_WINOGRAD);
-	 
-
-  // FIXIT: Algo shouldn't be hardcoded
-  //convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_PRECOMP_GEMM;
-  convAlgo = CUDNN_CONVOLUTION_FWD_ALGO_IMPLICIT_GEMM;
-
-  size_t workspace_size;
-  checkCUDNN(cudnnGetConvolutionForwardWorkspaceSize(cudnnHandle,
-						     input->tensor_desc,
-						     filter->filter_desc,
-						     convDesc,
-						     output->tensor_desc,
-						     convAlgo,
-						     &workspace_size));
-
-  // Allocating memory for the convolution workspace
-  void* workspace;
-  checkCudaErrors(cudaMalloc(&workspace, workspace_size)); 
-  DEBUG("workspace size = %d \n", workspace_size);
-
-
-  checkCUDNN(cudnnConvolutionForward(cudnnHandle, &alpha, input->tensor_desc,
-				     input->gpu_data, filter->filter_desc, filter->gpu_data,
-				     convDesc, convAlgo, workspace, workspace_size,
-				     &beta, output->tensor_desc, output->gpu_data));
-		       
-  profileEvent("Conv_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-
-  // Ignore Error Injection for Depthwise Convolution  
-  /*if (conv_groups > 1){
-    op_acc = 0;
-  }
-  */
-
-  
-  void* error_norms = tensorAddError(output, op_acc);
-  add_norms(error_norms, "tensorConv", op_acc);
-  add_conv_overheads(input, filter, vertical_stride, horizontal_stride, op_acc);
-
-  op_counter++;
-  
-  #endif
-  
-  
-  return output;
-}
-
-
-
-// NOTE: Supports Max and Avg Pooling
-void* tensorPooling(void* input_ptr,
-		    int poolFunction,
-		    int window_height, int window_width,
-		    int vertical_pad, int horizontal_pad,
-		    int vertical_stride, int horizontal_stride){
-
-  INFO("*** TensorPooling \n");
-  profileEvent("Pool");
-
-  Tensor* input = (Tensor*) input_ptr;
-
-  cudnnPoolingDescriptor_t poolDesc;
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-
-  convertToFP32(input);
-
-  
-  checkCUDNN(cudnnCreatePoolingDescriptor(&poolDesc));            
-
-  int n = input->dims.dim_sizes[0];
-  int c = input->dims.dim_sizes[1];
-  int h = (input->dims.dim_sizes[2] + (2 * vertical_pad) - window_height) / vertical_stride;
-  h = h + 1;
-  int w = (input->dims.dim_sizes[3] + (2 * horizontal_pad) - window_width) / horizontal_stride;
-  w = w + 1;
-
-  DEBUG("n = %d, c = %d, h = %d, w = %d \n", n, c, h, w);
-  
-  // FIXIT: Don't be specific to floats
-  Tensor* output = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, n, c, h, w);
-  // Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE); 
-
-  // FIXIT: Fix being specific to CUDNN_DATA_FLOAT and NCHW format
-  // FIXIT: Is this setTensor even needed?
-  checkCUDNN(cudnnSetTensor4dDescriptor(output->tensor_desc,
-					CUDNN_TENSOR_NCHW,
-					CUDNN_DATA_FLOAT,
-					n, c,
-					h, w));
-
-
-  cudnnPoolingMode_t pool_mode;
-  if(poolFunction == 0)
-    pool_mode = CUDNN_POOLING_MAX;
-  else if(poolFunction == 1)
-    pool_mode = CUDNN_POOLING_AVERAGE_COUNT_EXCLUDE_PADDING;
-  else
-    ERROR("Unsupported pool function %d \n", poolFunction);
-
-  
-  
-  checkCUDNN(cudnnSetPooling2dDescriptor(poolDesc,
-					 pool_mode,
-					 CUDNN_PROPAGATE_NAN,
-					 window_height, window_width,
-					 vertical_pad, horizontal_pad,
-					 vertical_stride, horizontal_stride));
-     
-  checkCUDNN(cudnnPoolingForward(cudnnHandle, poolDesc, &alpha, input->tensor_desc,
-				 input->gpu_data, &beta, output->tensor_desc, output->gpu_data));
-
-  profileEvent("Pool_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-  void* error_norms = tensorAddError(output, op_acc);
-  add_norms(error_norms, "tensorPooling", op_acc);
-  add_pool_overheads(input, window_height, vertical_stride, op_acc);
-
-  op_counter++;
-  
-  #endif
-
-  
-  return output;
-}
-
-
-
-
-void* tensorGemmCPU(void* lhs_ptr, void* rhs_ptr){
-
-  INFO("*** TensorGemmCPU \n");
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-  
-  // The operation is done on the CPU
-  deviceToHostCopy(lhs);
-  deviceToHostCopy(rhs);
-
-  if(lhs->data_type != CUDNN_DATA_FLOAT){
-    ERROR("Currently only Floating point is supported ");
-  }
-  
-  profileEvent("tensorGemmCPU");
-  
-  INFO("rhs->dims.num_dims = %d \n", rhs->dims.num_dims);
-  INFO("lhs->dims.num_dims = %d \n", lhs->dims.num_dims);
-
-  // 'm' holds the batch dimension - assuming NCHW format Tensors
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs must be a 2D tensor
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-  // Flattening the dimensions after the batch dimension
-  // NOTE: Allowing any number of dimensions > 2 for lhs
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Sanity check: k must match across the two tensors
-  INFO("m = %d, n = %d, k = %d \n", m, n, k);
-  if(rhs_k != k){
-    ERROR("rhs=%d and lhs=%d columns/rows don't match", rhs_k, k);
-  }
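-
-  // Worked example (illustrative): for an lhs of dims [32, 3, 8, 8] (batch 32)
-  // and an rhs of dims [1, 1, 192, 10], we get m = 32, k = 3*8*8 = 192, n = 10;
-  // rhs_k = 192 matches k, and the result is created as a [32, 10, 1, 1] tensor.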
-
-  // NOTE: Creating a 4D tensor to be compatible with later called cuDNN routines
-  Tensor* output = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, m, n, 1, 1);
-  // The GEMM is computed on the host, so the output placement stays HOST
-  changeTensorPlacement(output, HOST);
-
-  float* lhs_arr = (float*) lhs->host_data;
-  float* rhs_arr = (float*) rhs->host_data;
-  float* output_arr = (float*) output->host_data;
-  
-  for(int i = 0; i < m; i++){
-    for(int j = 0; j < n; j++){
-      float sum = 0.0;
-      for(int l = 0; l < k; l++){
-	float mul = lhs_arr[i*k+l] * rhs_arr[l*n+j];
-	sum = sum + mul;
-      }
-      output_arr[i*n+j] = sum;
-    }
-  }
-      
-   
-  profileEvent("tensorGemmCPU_end", true);
-  
-  return output;
-}
-
-
-
-// Reference: https://gist.github.com/peterwittek/6303527
-void* tensorGemmGPU(void* lhs_ptr, void* rhs_ptr){
-
-  INFO("*** TensorGemmGPU \n");
-  profileEvent("Mul");
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-
-
-  INFO("rhs->dims.num_dims = %d \n", rhs->dims.num_dims);
-  INFO("lhs->dims.num_dims = %d \n", lhs->dims.num_dims);
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  // 'm' holds the batch dimension - assuming NCHW format Tensors
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs last dimension must contain the neurons
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-  
-  // Flattening the dimensions after the batch dimension
-  // NOTE: Allowing any number of dimensions > 2 for lhs
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Sanity check: k must match across the two tensors
-  INFO("m = %d, n = %d, k = %d \n", m, n, k);
-  if(rhs_k != k){
-    ERROR("rhs=%d and lhs=%d columns/rows don't match", rhs_k, k);
-  }
-
-  DEBUG("Creating new TENSOR * \n");
-  Tensor* output = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, m, n, 1, 1);
-
-  DEBUG("Changing placement *\n");
-  // Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE); 
-
-  DEBUG("Changed Placement * \n\n");
-
-  hostToDeviceCopy(lhs);
-  hostToDeviceCopy(rhs);
-
-  convertToFP32(lhs);
-  convertToFP32(rhs);
-
-  
-  DEBUG("CuBlasSgemm *\n");
-   
-  // INFO: cuBlas uses column-major format
-  // INFO: The leading dimension is just the FIRST Dimension
-  // IMP: output is N * M in column-major format, M*N in row-major - what cuDNN expects
-  checkCudaErrors(cublasSgemm(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-			      n, m, k,
-			      &alpha,
-			      (float*) rhs->gpu_data, n,
-			      (float*) lhs->gpu_data, k,
-			      &beta,
-			      (float*) output->gpu_data, n));  
-
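-  // Why the operand order works (explanatory note): row-major A (m x k) and
-  // B (k x n) are exactly what column-major cuBLAS sees as A^T and B^T, so
-  // computing B^T * A^T = (A*B)^T in column-major order leaves A*B laid out
-  // row-major in output->gpu_data with leading dimension n.
-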
-  
-  profileEvent("Mul_end", true);
-
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-  
-  void* error_norms = tensorAddError(output, op_acc);
-  add_norms(error_norms, "tensorGemm", op_acc);
-  add_gemm_overheads(lhs_ptr, rhs_ptr, op_acc);
-
-  op_counter++;
-  
-  #endif
- 
-  
-  return output;
-}
-
-
-
-
-
-
-
-void* tensorGemm(void* lhs_ptr, void* rhs_ptr){
-
-  INFO("*** TensorGemm \n");
-  profileEvent("tensorGemm");
-
-  Tensor* lhs = (Tensor*) lhs_ptr;
-  Tensor* rhs = (Tensor*) rhs_ptr;
-    
-  INFO("rhs->dims.num_dims = %d \n", rhs->dims.num_dims);
-  INFO("lhs->dims.num_dims = %d \n", lhs->dims.num_dims);
-
-  // FIXIT: Need to be more aware of the implications of alpha and beta
-  float alpha = 1.0f, beta = 0.0f;
-  // 'm' holds the batch dimension - assuming NCHW format Tensors
-  int m = lhs->dims.dim_sizes[0];
-  // The rhs last dimension must contain the neurons
-  int n = rhs->dims.dim_sizes[rhs->dims.num_dims-1]; // output neurons
-  int k = 1;
-  // Flattening the dimensions after the batch dimension
-  // NOTE: Allowing any number of dimensions > 2 for lhs
-  for (int j = 1 ; j < lhs->dims.num_dims; j++){
-    k = k * lhs->dims.dim_sizes[j]; // input neurons
-  }
-
-  int rhs_k = rhs->dims.dim_sizes[rhs->dims.num_dims-2];
-  // Sanity check: k must match across the two tensors
-  INFO("m = %d, n = %d, k = %d \n", m, n, k);
-  if(rhs_k != k){
-    ERROR("rhs=%d and lhs=%d columns/rows don't match", rhs_k, k);
-  }
-
-  // NOTE: Creating a 4D tensor to be compatible with later called cuDNN routines
-  Tensor* output = (Tensor*) create4DTensor(CUDNN_DATA_FLOAT, CUDNN_TENSOR_NCHW, m, n, 1, 1);
-  // Changing output tensor placement from host to device
-  changeTensorPlacement(output, DEVICE); 
-
-  hostToDeviceCopy(lhs);
-  hostToDeviceCopy(rhs);
-
-  // NOTE: cuBlas uses column-major format
-  // NOTE: The leading dimension is the FIRST Dimension
-  // NOTE: The output is N * M in column-major format, M*N in row-major - what cuDNN expects
-  checkCudaErrors(cublasSgemm(cublasHandle, CUBLAS_OP_T, CUBLAS_OP_N,
-			      n, m, k,
-			      &alpha,
-			      (float*) rhs->gpu_data, k,
-			      (float*) lhs->gpu_data, k,
-			      &beta,
-			      (float*) output->gpu_data, n));
-  
-  profileEvent("tensorGemm_end", true);
-  
-  return output;
-}
-
-
-
-
-// FIXIT: Add dimension check assertions throughout the code
-void* tensorGemmBias(void* input_ptr, void* bias_ptr){
-
-  INFO("*** TensorGemmBias \n");
-  profileEvent("tensorGemmBias");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* bias = (Tensor*) bias_ptr;  
-
-  // NOTE: beta is set to 1 to append to input
-  // C = A * B + Beta * C
-  float alpha = 1.0f, beta = 1.0f;
-  // 'm' holds the batch dimension - assuming NCHW format Tensors
-  int m = input->dims.dim_sizes[0];
-  // The bias must be a 2D tensor
-  int n = bias->dims.dim_sizes[bias->dims.num_dims - 1]; // output neurons
-
-  INFO("m = %d, n = %d \n", m, n);
-  
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(bias);
-
-  struct Tensor* onevec = (Tensor*) create2DTensor(CUDNN_DATA_FLOAT, m, 1);
-  fillOnes(onevec);
-  hostToDeviceCopy(onevec);
-  
-  // NOTE: cuBlas uses column-major format
-  // NOTE: The leading dimension is just the FIRST Dimension
-  checkCudaErrors(cublasSgemm(cublasHandle, CUBLAS_OP_N, CUBLAS_OP_N,
-			      n, m, 1,
-			      &alpha,
-			      (float*) bias->gpu_data, n,
-			      (float*) onevec->gpu_data, 1,
- 			      &beta,
-			      (float*) input->gpu_data, n));
-
-  profileEvent("tensorGemmBias_end", true);
-  
-  return input;
-}
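-
-// Note on the ones-vector trick above (explanatory): with beta = 1, the
-// cublasSgemm call computes input += onevec (m x 1) * bias (1 x n), a rank-1
-// update that broadcasts the same bias row onto each of the m batch rows.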
-
-
-void* tensorRelu(void* input_ptr){
-
-  INFO("*** TensorRelu \n");
-  profileEvent("Relu");
-
-  Tensor* input = (Tensor*) input_ptr;
-  
-  cudnnActivationDescriptor_t reluDesc;
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-
-  convertToFP32(input);
-  
-  
-  checkCUDNN(cudnnCreateActivationDescriptor(&reluDesc));
-
-  checkCUDNN(cudnnSetActivationDescriptor(reluDesc, CUDNN_ACTIVATION_RELU,
-					  CUDNN_PROPAGATE_NAN, 0.0));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, reluDesc, &alpha,
-				    input->tensor_desc, input->gpu_data, &beta,
-				    input->tensor_desc, input->gpu_data));
-
-  profileEvent("Relu_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-  
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-    
-  void* error_norms = tensorAddError(input, op_acc);
-  add_norms(error_norms, "tensorRelu", op_acc);
-  add_relu_overheads(input, op_acc);
-  op_counter++;  
-  #endif
-  
-
-  return input;
-}
-
-
-// Think: Should Softmax be broken into multiple IR operations?
-void* tensorSoftmax(void* input_ptr){
-
-  INFO("*** TensorSoftmax \n");
-  profileEvent("Softmax");
-
-  Tensor* input = (Tensor*) input_ptr;
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-  convertToFP32(input); 
- 
-    
-  // IMP: CUDNN_SOFTMAX_ACCURATE can be replaced with the less accurate CUDNN_SOFTMAX_FAST
-  checkCUDNN(cudnnSoftmaxForward(cudnnHandle, CUDNN_SOFTMAX_ACCURATE, CUDNN_SOFTMAX_MODE_CHANNEL,
-				 &alpha, input->tensor_desc, input->gpu_data, &beta,
-				 input->tensor_desc, input->gpu_data));
-
-  deviceToHostCopy(input);  
-  profileEvent("Softmax_end", true);
-  
-  return input;
-}
-
-
-
-__global__ void clipValues(float* A, float min, float max, int n){
-
-  int id = blockIdx.x * blockDim.x + threadIdx.x;
-
-  if(id < n){
-    A[id] = fmaxf(min, A[id]);
-    A[id] = fminf(max, A[id]);
-  }
-}
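-
-// Hypothetical launch of the kernel above (illustrative only):
-//
-//   int threads = 256;
-//   int blocks = (n + threads - 1) / threads;
-//   clipValues<<<blocks, threads>>>((float*) t->gpu_data, min, max, n);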
-
-
-
-void* tensorRelu2(void* input_ptr, float min, float max){
-
-  INFO("*** TensorClippedRelu *** \n");
-  profileEvent("Relu");
-
-  cudnnActivationDescriptor_t reluDesc;
-  float alpha = 1.0f, beta = 0.0f;
-  
-  Tensor* input = (Tensor*) input_ptr;
-
-  hostToDeviceCopy(input);
-
-  convertToFP32(input);
-  
-
-  checkCUDNN(cudnnCreateActivationDescriptor(&reluDesc));
-
-  // NOTE: cuDNN's clipped ReLU clamps to [0, max]; the 'min' argument is not
-  // honored on this path
-  checkCUDNN(cudnnSetActivationDescriptor(reluDesc, CUDNN_ACTIVATION_CLIPPED_RELU,
-					  CUDNN_PROPAGATE_NAN, max));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, reluDesc, &alpha,
-				    input->tensor_desc, input->gpu_data, &beta,
-				    input->tensor_desc, input->gpu_data));
-
-  
-  
-  profileEvent("Relu_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-  
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-  void* error_norms = tensorAddError(input, op_acc);
-  add_norms(error_norms, "tensorClippedRelu", op_acc);
-  add_relu_overheads(input, op_acc);
-  op_counter++;  
-  #endif
-  
-
-  return input;
-}
-
-
-void* tensorTanh(void* input_ptr){
-
-  INFO("*** TensorTanh \n");
-  profileEvent("Tanh");
-
-  Tensor* input = (Tensor*) input_ptr;
-  
-  cudnnActivationDescriptor_t tanhDesc;
-  float alpha = 1.0f, beta = 0.0f;
-
-  hostToDeviceCopy(input);
-
-  convertToFP32(input);
-
-  
-  checkCUDNN(cudnnCreateActivationDescriptor(&tanhDesc));
-
-  checkCUDNN(cudnnSetActivationDescriptor(tanhDesc, CUDNN_ACTIVATION_TANH,
-					  CUDNN_PROPAGATE_NAN, 0.0));
-
-  checkCUDNN(cudnnActivationForward(cudnnHandle, tanhDesc, &alpha,
-				    input->tensor_desc, input->gpu_data, &beta,
-				    input->tensor_desc, input->gpu_data));
-
-  profileEvent("Tanh_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-  
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-  void* error_norms = tensorAddError(input, op_acc);
-  add_norms(error_norms, "tensorTanh", op_acc);
-  add_relu_overheads(input, op_acc);
-  op_counter++;  
-  #endif
-  
-
-  return input;
-}
-
-
-
-
-void* tensorBatchNorm(void* input_ptr, void* gamma_ptr, void* beta_ptr,
-		      void* mean_ptr, void* variance_ptr, double epsilon){
-
-  INFO("*** TensorBatchNorm \n");
-  profileEvent("BatchNorm");
-
-  Tensor* input = (Tensor*) input_ptr;
-  Tensor* gamma = (Tensor*) gamma_ptr;
-  Tensor* beta = (Tensor*) beta_ptr;
-  Tensor* mean = (Tensor*) mean_ptr;
-  Tensor* variance = (Tensor*) variance_ptr;
-
-  if (input == NULL || gamma == NULL || beta == NULL || mean == NULL || variance == NULL){
-    ERROR("NULL Input Tensor");
-  }
-  
-  float alpha_val = 1.0f, beta_val = 0.0f;
-  hostToDeviceCopy(input);
-  hostToDeviceCopy(gamma);
-  hostToDeviceCopy(beta);
-  hostToDeviceCopy(mean);
-  hostToDeviceCopy(variance);
-
-  convertToFP32(input);
-
- 
-  
-  checkCUDNN(cudnnBatchNormalizationForwardInference(cudnnHandle, CUDNN_BATCHNORM_SPATIAL,
-						     &alpha_val, &beta_val,
-						     input->tensor_desc, input->gpu_data,
-						     input->tensor_desc, input->gpu_data,
-						     gamma->tensor_desc, gamma->gpu_data,
-						     beta->gpu_data, mean->gpu_data,
-						     variance->gpu_data,
-						     epsilon));
-
-  profileEvent("BatchNorm_end", true);
-
-
-  #ifdef ERROR_INJECTION_ENABLED
-  
-  if(op_counter >= total_ops){
-    ERROR("No accuracy flag found \n");
-  }
-  
-  int op_acc = op_accuracies[op_counter];
-  void* error_norms = tensorAddError(input, op_acc);
-  add_norms(error_norms, "tensorBatchNorm", op_acc);
-  add_relu_overheads(input, op_acc);
-  op_counter++;  
-  #endif
-  
-
-  return input;
-}
-
-
-
-
-/************* GPU Layer API  *************/
-
-void* ConvLayer_GPU(void* input, 
-		    void* filter, 
-		    void* bias, 
-		    int conv_pad_h, int conv_pad_w, int conv_stride_h, int conv_stride_w,
-		    int pool_id, int pool_size,
-		    int activation_id, // Relu, Tanh, ClipRelu
-		    float out_min, float out_max){ // NOTE: min_val, max_val apply to 'ClippedRelu'
-
-  void* conv_out = tensorConvolution(input, filter,
-				     conv_pad_h, conv_pad_w,
-				     conv_stride_h, conv_stride_w,
-				     1, 0);
-  void* conv_add;
-  if(bias != NULL){
-    conv_add = tensorAdd(conv_out, bias);
-  }
-  else{
-    conv_add = conv_out;
-  }
-
-  void* activation_out;  
-  switch(activation_id){
-  case -1:
-    activation_out = conv_add;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(conv_add);
-    break;
-  case 1:
-    activation_out = tensorRelu(conv_add);
-    break;
-  case 2:
-    activation_out = tensorRelu2(conv_add, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_id);
-    break;
-  }
-
-
-  void* pool_out = activation_out;
-  // NOTE: Skip pooling on non-positive pool sizes
-  if(pool_size > 0){
-    //FIXME: Currently only using MaxPooling
-    pool_out = tensorPooling(activation_out, 0, pool_size, pool_size, 0, 0, pool_size, pool_size);
-  }
-
-  return pool_out;
-}
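-
-// Example (a sketch with hypothetical tensor handles): a 3x3 convolution with
-// unit stride and padding, ReLU (activation_id = 1), and 2x2 max pooling.
-// out_min/out_max are ignored for plain ReLU.
-//
-//   void* out = ConvLayer_GPU(input, filter, bias,
-//                             1, 1,   // conv_pad_h, conv_pad_w
-//                             1, 1,   // conv_stride_h, conv_stride_w
-//                             0, 2,   // pool_id, pool_size
-//                             1,      // activation_id: ReLU
-//                             0.0f, 0.0f);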
-
-
-void* FCLayer_GPU(void* input, 
-		  void* weights, 
-		  void* bias, 
-		  int activation_id,
-		  float out_min, float out_max){ // NOTE: out_min, out_max apply only to 'ClippedRelu'
-
-  void* gemm_out = tensorGemmGPU(input, weights);
-
-  void* gemmbias_out;
-  if(bias != NULL){
-    gemmbias_out = tensorAdd(gemm_out, bias);
-  }
-  else{
-    gemmbias_out = gemm_out;
-  }
- 
-  void* activation_out;
-  switch(activation_id){
-
-  case -1:
-    activation_out = gemmbias_out;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(gemmbias_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(gemmbias_out);
-    break;
-  case 2:
-    activation_out = tensorRelu2(gemmbias_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_id);
-    break;
-  }
-   
-  return activation_out;
-}
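-
-// Example (sketch; handles are hypothetical): a fully-connected layer with
-// tanh activation (activation_id = 0).
-//
-//   void* fc_out = FCLayer_GPU(flatten_out, fc_weights, fc_bias,
-//                              0, 0.0f, 0.0f);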
-
-
-/*********** PROMISE API **************/
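-
-// NOTE: The reference PROMISE implementations below are commented out with
-// chained /* ... */ blocks (C block comments do not nest); the "-----" marker
-// lines re-open the outer comment after each embedded debug snippet's */
-// closes it.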
-
-/*
-void* ConvLayer_PROMISE(void* input, float i_min, float i_max,
-			void* filter, float w_min, float w_max,
-			void* bias, float b_min, float b_max,
-			int conv_pad_h, int conv_pad_w, int conv_stride_h, int conv_stride_w,
-			int pool_id, int pool_size,
-			int activation_id, // Relu, Tanh, ClipRelu
-			float out_min, float out_max, int swing){ 
-
-  
-  #ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-  
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-  
-  #endif  
-
-  
-  if (swing < 0 || swing > 20){
-    ERROR("Incorrect swing value");
-  }
-
-  
-
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    filter = quantizeTensorPromise(filter, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-    // NOTE: Model PROMISE's analog read (aRead) error
-    input = addPromiseError(input, swing);
-  }
-
-  
-  void* conv_out;
-  if(swing == 8 || (swing >= 12 && swing <= 15) ){
-    //conv_out = tensorConvPerf(input, filter, conv_pad_h, conv_pad_w,
-    //		              conv_stride_h, conv_stride_w, 1, 1, 1, 0);
-
-    int rows = 2;
-    switch(swing){
-
-    case 12: rows = 5; break;
-    case 13: rows = 4; break;
-    case 14: rows = 3; break;
-    case 15: rows = 2; break;    
-		   
-    default: rows = 2; break;
-    }
-    
-    conv_out = tensorConvPerf2(input, filter, conv_pad_h, conv_pad_w,
-    		              conv_stride_h, conv_stride_w, 1, 1, rows, 0);
-
-    /*void* gold = tensorConvolution(input, filter,
-				   conv_pad_h, conv_pad_w,
-				   conv_stride_h, conv_stride_w,
-				   1, 0);
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*) conv_out, (struct Tensor*) gold);
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-
-
-
-  /* -----
-  }
-  else if(swing == 9 || (swing >= 16 && swing <= 19) ){
-    //conv_out = tensorConvPerf(input, filter, conv_pad_h, conv_pad_w,
-    //		              conv_stride_h, conv_stride_w, 1, 1, 0, 1);
-
-
-    int cols = 2;
-    switch(swing){
-
-    case 16: cols = 5; break;
-    case 17: cols = 4; break;
-    case 18: cols = 3; break;
-    case 19: cols = 2; break;    
-		   
-    default: cols = 2; break;
-    }
-
-    
-    conv_out = tensorConvPerf2(input, filter, conv_pad_h, conv_pad_w,
-    		              conv_stride_h, conv_stride_w, 1, 1, 0, cols);
-
-
-    /*void* gold = tensorConvolution(input, filter,
-				   conv_pad_h, conv_pad_w,
-				   conv_stride_h, conv_stride_w,
-				   1, 0);
-
-    Norm_t* norms = calculateNormsTreeReduction((struct Tensor*)conv_out, (struct Tensor*) gold);
-
-    DEBUG("\n-------- l2_norm = %f \n", norms->l2_norm); 
-    */
-
-  /*------
-  }
-  else if(swing == 10){  
-    conv_out = tensorHalfConvolution(input, filter,
-				     conv_pad_h, conv_pad_w,
-				     conv_stride_h, conv_stride_w,
-				     1, 0);
-  }
-  else{
-    conv_out = tensorConvolution(input, filter,
-				 conv_pad_h, conv_pad_w,
-				 conv_stride_h, conv_stride_w,
-				 1, 0);
-  }
-  
-  void* conv_add;
-  if(bias != NULL){
-    if(swing >= 8){  
-      conv_add = tensorHalfAdd(conv_out, bias);
-    }
-    else{
-      conv_add = tensorAdd(conv_out, bias);
-    }
-  }
-  else{
-    conv_add = conv_out;
-  }
-
-  void* pool_out;
-  // NOTE: Skip pooling on non-positive pool sizes
-  if(pool_size > 0){
-    //FIXME: Currently only using MaxPooling
-    pool_out = tensorHalfPooling(conv_add, 0, pool_size, pool_size, 0, 0, pool_size, pool_size);
-  }
-  else{
-    pool_out = conv_add;
-  }
-  
-  void* activation_out;  
-  switch(activation_id){
-  case -1:
-    activation_out = pool_out;
-    INFO("NO Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorHalfTanh(pool_out);
-    break;
-  case 1:
-    activation_out = tensorHalfRelu(pool_out);
-    break;
-  case 2:
-    activation_out = tensorHalfRelu2(pool_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_id);
-    break;
-  }
-
-
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-  
-  return activation_out;
-}
-
-
-void* FCLayer_PROMISE(void* input, float i_min, float i_max,
-		      void* weights, float w_min, float w_max,
-		      void* bias, float b_min, float b_max,
-		      int activation_id,
-		      float out_min, float out_max, int swing){ // NOTE: out_min, out_max apply only to 'ClippedRelu'
-
-
-  
-  #ifdef PROMISE_TUNER_ENABLED
-
-  // NOTE: Skip reading file-based error levels for ApproxHPVM wrapper runtime
-  if(!approxhpvm_runtime_mode){
-
-    if(op_counter >= total_ops){
-      ERROR("No accuracy flag found \n");
-    }
-  
-    swing = op_accuracies[op_counter];
-    op_counter++;
-  }
-  
-  #endif
- 
-  
-  if (swing < 0 || swing > 20){
-    ERROR("Incorrect swing value");
-  }
-  
-  if(swing < 8){
-    input = quantizeTensorPromise(input, i_min, i_max);
-    weights = quantizeTensorPromise(weights, w_min, w_max);
-    if(bias != NULL)
-      bias = quantizeTensorPromise(bias, b_min, b_max);
-
-    // NOTE: Modelling aRead error in PROMISE
-    input = addPromiseError(input, swing);
-  }
-
-
-  
-  void* gemm_out;
-  if(swing >= 8 && swing < 11){
-    gemm_out = tensorHalfGemm(input, weights);
-  }
-  else{
-    gemm_out = tensorGemmGPU(input, weights);
-  }
-
-  
-  void* gemmbias_out;
-  if(bias != NULL){
-    // Swing levels 8-19 use the half-precision (FP16) add
-    if(swing >= 8 && swing < 20){
-      gemmbias_out = tensorHalfAdd(gemm_out, bias);
-    }
-    else{
-      gemmbias_out = tensorAdd(gemm_out, bias);
-    }
-  }
-  else{
-    gemmbias_out = gemm_out;
-  }
- 
-  void* activation_out;
-  switch(activation_id){
-
-  case -1:
-    activation_out = gemmbias_out;
-    INFO("No Activation Function \n");
-    break;
-  case 0:
-    activation_out = tensorTanh(gemmbias_out);
-    break;
-  case 1:
-    activation_out = tensorRelu(gemmbias_out);
-    break;
-  case 2:
-    activation_out = tensorRelu2(gemmbias_out, out_min, out_max);
-    break;
-  default:
-    ERROR("Activation id %d NOT supported \n", activation_id);
-    break;
-  }
-  
-  
-  if(swing < 8 && activation_id != -1){
-    activation_out = quantizeTensorPromise(activation_out, out_min, out_max);
-  }
-  
-  return activation_out;
-}
-
-*****/
-
-
-
-/**** Wrapper Runtime API ***/
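-
-// Each wrapper below resolves the HPVM node's configuration through the global
-// RC object and dispatches accordingly: PROMISE-mapped nodes take the
-// swing-level path, while GPU-mapped nodes walk the per-tensor-op
-// approximation tuples in their GPUNodeConfiguration.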
-  
-void* wrapper_ConvLayer(const char* hpvm_node_id,
-			void* input, 
-		        void* filter, 
-		        void* bias, 
-		        int conv_pad_h, int conv_pad_w,
-		        int conv_stride_h, int conv_stride_w,
-		        int pool_id, int pool_size,
-		        int activation_id,
-		        // NOTE: out_min, out_max are only relevant for ClippedRelu
-		        float out_min, float out_max){
-
-  NodeConfiguration *NodeConf = RC->getNodeConfiguration(hpvm_node_id);
-
-  if (NodeConf->isPROMISENodeConfiguration()) {
-    DEBUG("PROMISE Configuration for ConvLayer\n");
-    // Mapped to PROMISE - get a PROMISE node configuration
-    PROMISENodeConfiguration *PROMISEConf = (PROMISENodeConfiguration *)NodeConf;
-    std::vector<float> &QRanges = RC->getQuantizationRanges(hpvm_node_id);
-
-    std::vector<std::pair<PROMISENodeConfiguration::APPROX, int> > &approxTuples =
-      PROMISEConf->getApproxChoices();
-
-    if (approxTuples.size() == 1) {
-      enum PROMISENodeConfiguration::APPROX approx = approxTuples[0].first;
-      int param = approxTuples[0].second;
-      if (approx == PROMISENodeConfiguration::APPROX::SWING_LEVEL) {
-        DEBUG("Approximation choice for ConvLayer: swing level %d\n", param);
-
-        struct Tensor* input_tensor_cast = (struct Tensor*) input;
-        struct Tensor* filter_tensor_cast = (struct Tensor*) filter;
-        std::pair<double, double> pinfo =
-          RC->conv_profile(input_tensor_cast->dims.dim_sizes[0], //n
-                           input_tensor_cast->dims.dim_sizes[1], //c
-                           input_tensor_cast->dims.dim_sizes[2], //h
-                           input_tensor_cast->dims.dim_sizes[3], //w
-                           filter_tensor_cast->dims.dim_sizes[0], //c_out
-                           filter_tensor_cast->dims.dim_sizes[1], //c_in
-                           filter_tensor_cast->dims.dim_sizes[2], //k_h
-                           filter_tensor_cast->dims.dim_sizes[3], //k_w
-                           conv_stride_h, //s_h
-                           conv_stride_w, //s_w
-                           param, //voltage_swing
-                           filter_tensor_cast->dims.dim_sizes[2] *
-                             filter_tensor_cast->dims.dim_sizes[3] /*patch_factor: k_h*k_w*/);
-        RC->addToCurrentIterationComputeTime("ConvLayer_PROMISE", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("ConvLayer_PROMISE", pinfo.second);
-        void* t_out;
-        t_out = PROMISE_Conv(input, QRanges[0], QRanges[1],
-                            filter, QRanges[2], QRanges[3],
-                            bias, QRanges[4], QRanges[5],
-                            conv_pad_h, conv_pad_w,
-                            conv_stride_h, conv_stride_w,
-                            pool_id, pool_size,
-                            activation_id,
-                            QRanges[6], QRanges[7], param);
-
-        return t_out;
-      } else {
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      }
-      // TODO additional approx methods implemented here
-
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  }
-  else if (NodeConf->isGPUNodeConfiguration()) {
-    DEBUG("GPU Configuration for ConvLayer\n");
-    // Mapped to GPU - get a GPU node configuration
-    GPUNodeConfiguration *GPUConf = (GPUNodeConfiguration *)NodeConf;
-
-    std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                            std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                   int> > > > &ApproxChoices =
-      GPUConf->getApproxChoices();
-
-    // Check for convolution as first operation
-    CUSTOM_ASSERT((ApproxChoices.size() >= 1) &&
-           (ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::CONV) &&
-           "Incorrect number/type of operations in provided Conv layer configuration");
-
-    void* conv_out = handleTensorConvApproximationTuples(ApproxChoices[0].second,
-                       input, filter, conv_pad_h, conv_pad_w,
-                       conv_stride_h, conv_stride_w);
-    void* add_out;
-    if (bias != NULL) {
-      // Check for add as second operation
-      CUSTOM_ASSERT((ApproxChoices.size() >= 2) &&
-             (ApproxChoices[1].first == GPUNodeConfiguration::TENSOR_OP::ADD) &&
-             "Incorrect number/type of operations in provided Conv layer configuration");
-      add_out = handleTensorAddApproximationTuples(ApproxChoices[1].second,
-                                                   conv_out, bias);
-    } else {
-      add_out = conv_out;
-    }
-
-    void* activation_out;
-    switch (activation_id) {
-      case -1:
-        { // No activation
-          INFO("No activation Function\n");
-          activation_out = add_out;
-        }
-        break;
-      case 0:
-        { // TanH activation
-          CUSTOM_ASSERT((ApproxChoices.size() >= 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::TANH) &&
-                 "Incorrect number/type of operations in provided Conv layer configuration");
-          activation_out = handleTensorTanhApproximationTuples(ApproxChoices[2].second,
-                                                               add_out);
-        }
-        break;
-      case 1:
-        { // ReLU activation
-          CUSTOM_ASSERT((ApproxChoices.size() >= 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::RELU) &&
-                 "Incorrect number/type of operations in provided Conv layer configuration");
-          activation_out = handleTensorReluApproximationTuples(ApproxChoices[2].second,
-                                                               add_out);
-        }
-        break;
-      case 2:
-        { // Clipped ReLU activation
-          CUSTOM_ASSERT((ApproxChoices.size() >= 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::CLIPPED_RELU) &&
-                 "Incorrect number/type of operations in provided Conv layer configuration");
-          activation_out =
-            handleTensorClippedReluApproximationTuples(ApproxChoices[2].second,
-                                                       add_out, out_min, out_max);
-        }
-        break;
-      default:
-        {
-          ERROR("Activation id %d NOT supported \n", activation_id);
-        }
-        break;
-    }
-
-    void* pool_out;
-
-    if (pool_size > 0) {
-      switch (pool_id) {
-        case 0:
-          {
-            // If we remove the asserts, we can have all cases handled by a single call
-            CUSTOM_ASSERT((ApproxChoices.back().first == GPUNodeConfiguration::TENSOR_OP::POOL_MAX) &&
-                  "Expected POOL_MAX in provided Conv layer configuration");
-            pool_out =
-              handleTensorPoolingApproximationTuples(ApproxChoices.back().second,
-                                                     activation_out, pool_id,
-                                                     pool_size, pool_size, 0, 0,
-                                                     pool_size, pool_size);
-          }
-          break;
-        case 1:
-          {
-            CUSTOM_ASSERT((ApproxChoices.back().first == GPUNodeConfiguration::TENSOR_OP::POOL_MEAN) &&
-                  "Expected POOL_MEAN in provided Conv layer configuration");
-            pool_out =
-              handleTensorPoolingApproximationTuples(ApproxChoices.back().second,
-                                                     activation_out, pool_id,
-                                                     pool_size, pool_size, 0, 0,
-                                                     pool_size, pool_size);
-          }
-          break;
-        case 2:
-          {
-            CUSTOM_ASSERT((ApproxChoices.back().first == GPUNodeConfiguration::TENSOR_OP::POOL_MIN) &&
-                  "Expected POOL_MIN in provided Conv layer configuration");
-            pool_out =
-              handleTensorPoolingApproximationTuples(ApproxChoices.back().second,
-                                                     activation_out, pool_id,
-                                                     pool_size, pool_size, 0, 0,
-                                                     pool_size, pool_size);
-          }
-          break;
-        default:
-          {
-            ERROR("Pool id %d NOT supported \n", pool_id);
-          }
-          break;
-      }
-    } else {
-      pool_out = activation_out;
-    }
-    return pool_out;
-  }
-  else {
-    ERROR("Unsupported Configuration");
-    abort();
-  }
-
-  return NULL;
-}
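-
-// Example (sketch; "conv1" is a hypothetical HPVM node id): the node's
-// configuration decides whether this call takes the PROMISE or the GPU path.
-//
-//   void* out = wrapper_ConvLayer("conv1", input, filter, bias,
-//                                 1, 1, 1, 1,   // pad h/w, stride h/w
-//                                 0, 2,         // pool_id (max), pool_size
-//                                 1,            // activation_id: ReLU
-//                                 0.0f, 0.0f);  // unused for plain ReLU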
-
-
-void* wrapper_FCLayer(const char* hpvm_node_id,
-		      void* input, 
-		      void* weights, 
-		      void* bias, 
-		      int activation_id,
-		      // NOTE: out_min and out_max are only relevant for ClippedRelu
-		      float out_min, float out_max){ 
-
-  NodeConfiguration *NodeConf = RC->getNodeConfiguration(hpvm_node_id);
-
-  if (NodeConf->isPROMISENodeConfiguration()) {
-    DEBUG("PROMISE Configuration for FCLayer\n");
-    // Mapped to PROMISE - get a PROMISE node configuration
-    PROMISENodeConfiguration *PROMISEConf = (PROMISENodeConfiguration *)NodeConf;
-    std::vector<float> &QRanges = RC->getQuantizationRanges(hpvm_node_id);
-
-    std::vector<std::pair<PROMISENodeConfiguration::APPROX, int> > &approxTuples =
-      PROMISEConf->getApproxChoices();
-
-    if (approxTuples.size() == 1) {
-      enum PROMISENodeConfiguration::APPROX approx = approxTuples[0].first;
-      int param = approxTuples[0].second;
-      if (approx == PROMISENodeConfiguration::APPROX::SWING_LEVEL) {
-        DEBUG("Approximation choice for FCLayer: swing level %d\n", param);
-
-        struct Tensor* input_tensor_cast = (struct Tensor*) input;
-        struct Tensor* weights_tensor_cast = (struct Tensor*) weights;
-        CUSTOM_ASSERT((input_tensor_cast->dims.dim_sizes[1] *
-                       input_tensor_cast->dims.dim_sizes[2] *
-                       input_tensor_cast->dims.dim_sizes[3] ==
-                         weights_tensor_cast->dims.dim_sizes[2]) &&
-                      "Dimensions for matrix multiplication do not match.");
-        std::pair<double, double> pinfo =
-          RC->fc_profile(input_tensor_cast->dims.dim_sizes[0], //num_rows_a,
-                         input_tensor_cast->dims.dim_sizes[1] *
-                           input_tensor_cast->dims.dim_sizes[2] *
-                           input_tensor_cast->dims.dim_sizes[3], //num_cols_a,
-                         weights_tensor_cast->dims.dim_sizes[2], //num_rows_b,
-                         weights_tensor_cast->dims.dim_sizes[3], //num_cols_b,
-                         param, //voltage_swing,
-                         1 /*patch_factor*/);
-        RC->addToCurrentIterationComputeTime("FCLayer_PROMISE", pinfo.first);
-        RC->addToCurrentIterationComputeEnergy("FCLayer_PROMISE", pinfo.second);
-        void* t_out;
-        t_out = PROMISE_FC(input, QRanges[0], QRanges[1],
-                           weights, QRanges[2], QRanges[3],
-                           bias, QRanges[4], QRanges[5],
-                           activation_id,
-                           QRanges[6], QRanges[7], param);
-        return t_out;
-      } else {
-        CUSTOM_ASSERT(false && "Unknown approximation type");
-        ERROR("Unknown approximation type");
-        abort();
-      }
-      // TODO additional approx methods implemented here
-
-    } else if (approxTuples.size() == 2) {
-      ERROR("Currently unsupported case");
-      abort();
-    } else {
-      ERROR("Unsupported case");
-      abort();
-    }
-  }
-  else if (NodeConf->isGPUNodeConfiguration()) {
-    DEBUG("GPU Configuration for FCLayer\n");
-    // Mapped to GPU - get a GPU node configuration
-    GPUNodeConfiguration *GPUConf = (GPUNodeConfiguration *)NodeConf;
-
-    std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                            std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                   int> > > > &ApproxChoices =
-      GPUConf->getApproxChoices();
-
-    // Approximation choices must be for a FC wrapper operation
-    CUSTOM_ASSERT((ApproxChoices.size() == 2 || ApproxChoices.size() == 3) &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::MUL &&
-         ApproxChoices[1].first == GPUNodeConfiguration::TENSOR_OP::ADD &&
-         "Invalid configuration generated for FC layer wrapper operation");
-
-    void* gemm_out = handleTensorMulApproximationTuples(ApproxChoices[0].second,
-                                                        input, weights);
-    void* add_out = handleTensorAddApproximationTuples(ApproxChoices[1].second,
-                                                        gemm_out, bias);
-
-    void* activation_out;
-    switch (activation_id) {
-      case -1:
-        { // No activation
-          CUSTOM_ASSERT((ApproxChoices.size() == 2) &&
-                 "Incorrect number of operations in provided FC layer configuration");
-          INFO("No activation Function\n");
-          activation_out = add_out;
-        }
-        break;
-      case 0:
-        { // TanH activation
-          CUSTOM_ASSERT((ApproxChoices.size() == 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::TANH) &&
-                 "Incorrect number/type of operations in provided FC layer configuration");
-          activation_out = handleTensorTanhApproximationTuples(ApproxChoices[2].second,
-                                                               add_out);
-        }
-        break;
-      case 1:
-        { // ReLU activation
-          CUSTOM_ASSERT((ApproxChoices.size() == 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::RELU) &&
-                 "Incorrect number/type of operations in provided FC layer configuration");
-          activation_out = handleTensorReluApproximationTuples(ApproxChoices[2].second,
-                                                               add_out);
-        }
-        break;
-      case 2:
-        { // Clipped ReLU activation
-          CUSTOM_ASSERT((ApproxChoices.size() == 3) &&
-                 (ApproxChoices[2].first == GPUNodeConfiguration::TENSOR_OP::CLIPPED_RELU) &&
-                 "Incorrect number/type of operations in provided FC layer configuration");
-          activation_out =
-            handleTensorClippedReluApproximationTuples(ApproxChoices[2].second,
-                                                       add_out, out_min, out_max);
-        }
-        break;
-      default:
-        {
-          ERROR("Activation id %d NOT supported \n", activation_id);
-        }
-        break;
-    }
-    return activation_out;
-  }
-  else {
-    ERROR("Unsupported Configuration");
-    abort();
-  }
-
-  return NULL;
-}
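-
-// Example (sketch; "fc1" is a hypothetical node id):
-//
-//   void* fc_out = wrapper_FCLayer("fc1", input, weights, bias,
-//                                  -1,           // no activation
-//                                  0.0f, 0.0f);  // unused without ClippedRelu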
-
-
-
-
-void* wrapper_tensorRelu(const char* hpvm_node_id, void* input_ptr){
-//  return tensorRelu(input_ptr);
-
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a relu operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::RELU &&
-         "Invalid configuration generated for tensor relu wrapper operation");
-
-  return handleTensorReluApproximationTuples(ApproxChoices[0].second,
-                                             input_ptr);
-
-}
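-
-// The single-op wrappers below all follow the same pattern: fetch the node's
-// GPU configuration, assert that it describes exactly one tensor op of the
-// expected kind, and forward that op's approximation tuples to the matching
-// handleTensor*ApproximationTuples dispatcher.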
-
-void* wrapper_tensorClippedRelu(const char* hpvm_node_id,
-                                void* input_ptr,
-                                float out_min, float out_max){
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a relu operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::CLIPPED_RELU &&
-         "Invalid configuration generated for tensor clipped relu wrapper operation");
-
-  return handleTensorClippedReluApproximationTuples(ApproxChoices[0].second,
-                                                    input_ptr, out_min, out_max);
-
-}
-
-void* wrapper_tensorTanh(const char* hpvm_node_id, void* input_ptr){
-//  return tensorTanh(input_ptr);
-
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a tanh operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::TANH &&
-         "Invalid configuration generated for tensor tanh wrapper operation");
-
-  return handleTensorTanhApproximationTuples(ApproxChoices[0].second,
-                                             input_ptr);
-
-}
-
-
-void* wrapper_tensorBatchNorm(const char* hpvm_node_id,
-			      void* input_ptr, void* gamma_ptr, void* beta_ptr,
-			      void* mean_ptr, void* variance_ptr, double epsilon){
-//  return tensorBatchNorm(input_ptr, gamma_ptr, beta_ptr, mean_ptr, variance_ptr, epsilon);
-
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a batchnorm operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::BATCHNORM &&
-         "Invalid configuration generated for tensor batchnorm wrapper operation");
-
-  return handleTensorBatchNormApproximationTuples(ApproxChoices[0].second,
-                                                  input_ptr, gamma_ptr, beta_ptr,
-                                                  mean_ptr, variance_ptr, epsilon);
-
-}
-
-
-void* wrapper_tensorAdd(const char* hpvm_node_id, void* input_ptr, void* bias_ptr){
-//  return tensorAdd(input_ptr, bias_ptr);
-
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for an add operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::ADD &&
-         "Invalid configuration generated for tensor add wrapper operation");
-
-  return handleTensorAddApproximationTuples(ApproxChoices[0].second,
-                                            input_ptr, bias_ptr);
-
-}
-
-
-void* wrapper_tensorPooling(const char* hpvm_node_id,
-			    void* input_ptr,
-			    int poolFunction,
-			    int window_height, int window_width,
-			    int vertical_pad, int horizontal_pad,
-			    int vertical_stride, int horizontal_stride){
-
-//  return tensorPooling(input_ptr, poolFunction, window_height, window_width,
-//		       vertical_pad, horizontal_pad, vertical_stride, horizontal_stride);
-
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a single operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-                "Invalid configuration generated for tensor pool wrapper operation");
-  enum GPUNodeConfiguration::TENSOR_OP top = ApproxChoices[0].first;
-  // Approximation choices must be for a pool operation
-  CUSTOM_ASSERT((top == GPUNodeConfiguration::TENSOR_OP::POOL_MAX  ||
-                 top == GPUNodeConfiguration::TENSOR_OP::POOL_MEAN ||
-                 top == GPUNodeConfiguration::TENSOR_OP::POOL_MIN) &&
-         "Invalid configuration generated for tensor pool wrapper operation");
-
-  return handleTensorPoolingApproximationTuples(ApproxChoices[0].second,
-                                                input_ptr, poolFunction,
-                                                window_height, window_width,
-                                                vertical_pad, horizontal_pad,
-                                                vertical_stride, horizontal_stride);
-
-}
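-
-// Example (sketch; hypothetical node id): 2x2 max pooling with stride 2 and no
-// padding. poolFunction follows the pool_id convention used in
-// wrapper_ConvLayer above (0 = max, 1 = mean, 2 = min).
-//
-//   void* pooled = wrapper_tensorPooling("pool1", input,
-//                                        0,       // poolFunction: max
-//                                        2, 2,    // window h, w
-//                                        0, 0,    // vertical/horizontal pad
-//                                        2, 2);   // vertical/horizontal stride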
-
-
-void* wrapper_tensorGroupConvolution(const char* hpvm_node_id,
-                                     void* input, void* filter,
-                                     int vertical_pad, int horizontal_pad,
-                                     int vertical_stride, int horizontal_stride,
-                                     int conv_mode, int conv_groups){
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a group_conv operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::GROUP_CONV &&
-         "Invalid configuration generated for tensor group_conv wrapper operation");
-
-  return handleTensorGroupConvApproximationTuples(ApproxChoices[0].second,
-                                                  input, filter,
-                                                  vertical_pad, horizontal_pad,
-                                                  vertical_stride, horizontal_stride,
-                                                  conv_mode, conv_groups);
-
-}
-
-
-
-void* wrapper_tensorSoftmax(const char* hpvm_node_id, void* input_ptr){
-//  return tensorSoftmax(input_ptr);
-
-  // Only mapped to GPU - get a GPU configuration
-  GPUNodeConfiguration *GPUConf =
-    (GPUNodeConfiguration *)RC->getNodeConfiguration(hpvm_node_id);
-
-  std::vector< std::pair< GPUNodeConfiguration::TENSOR_OP,
-                          std::vector< std::pair<GPUNodeConfiguration::APPROX,
-                                                 int> > > > &ApproxChoices =
-    GPUConf->getApproxChoices();
-
-  // Approximation choices must be for a softmax operation
-  CUSTOM_ASSERT(ApproxChoices.size() == 1 &&
-         ApproxChoices[0].first == GPUNodeConfiguration::TENSOR_OP::SOFTMAX &&
-         "Invalid configuration generated for tensor softmax wrapper operation");
-
-  return handleTensorSoftmaxApproximationTuples(ApproxChoices[0].second, input_ptr);
-
-
-}
-
-
-
diff --git a/llvm/projects/soc_simulator/CMakeLists.txt b/llvm/projects/soc_simulator/CMakeLists.txt
deleted file mode 100644
index 6a78ce1ce5ea16a119838c71b195b4c1e0209010..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/CMakeLists.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-cmake_minimum_required(VERSION 3.5)
-set(libsrc src/promise_timing_model.cpp)
-set (CMAKE_CXX_STANDARD 11)
-add_library(promise_profiler STATIC ${libsrc})
-target_include_directories(promise_profiler PRIVATE include)
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs1.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs1.txt
deleted file mode 100644
index f8add8f3feabc93963e6f75fbce1ace36412ad07..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs1.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9
-9 9 9,7,7,7,9 9 9,9 9 9 9,9 9
-9 9 9,7,7,7,9 9 9,8 8 8 8,9 9
-9 9 9,7,7,7,8 8 8,9 9 9 9,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,9 9 9,7,9 9 9,7,9 9
-9 9 9,7,7,7,9 9 9,9 9 9 9,9 9
-9 9 9,7,7,7,8 8 8,9 9 9 9,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,7,7,9 9 9,9 9 9 9,9 9
-9 9 9,7,7,7,9 9 9,8 8 8 8,9 9
-9 9 9,7,7,7,9 9 9,7,9 9
-9 9 9,7,7,7,8 8 8,9 9 9 9,9 9
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs2.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs2.txt
deleted file mode 100644
index da9dce2be8e9649ece5376e363ede5b3a84629fe..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_confs2.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,7,9 9
-9 9 9,6,8 8 8,9 9 9 9,6,6,9 9
-9 9 9,6,8 8 8,6,8 8 8,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,7,9 9
-9 9 9,6,8 8 8,9 9 9 9,6,6,9 9
-9 9 9,6,8 8 8,6,8 8 8,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,6,8 8 8,6,6,7,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
-9 9 9,7,7,6,8 8 8,6,9 9
-9 9 9,6,8 8 8,9 9 9 9,6,6,9 9
-9 9 9,6,8 8 8,6,6,6,9 9
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp16.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp16.csv
deleted file mode 100644
index 2414bed90d2339bb232800ceccf1711302aa4174..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp16.csv
+++ /dev/null
@@ -1,70 +0,0 @@
-Add1,36.2217,116.852,62.3719,54.4799,6479.76,3464.4,3015.35,25.6183,86.2375,47.824,39.0211,4798.11,2666.76,2164.16
-Add1_f2h,56.9247,172.83,93.6324,79.1972,6048.31,3271.98,2776.33,40.2841,129.638,73.389,57.1983,4518.79,2553.55,1999.31
-Add1_h2f,41.6054,134.411,71.4991,62.9117,6462.47,3437.92,3024.54,29.4206,98.8293,54.3763,45.0458,4752.33,2615.03,2165.77
-Add2,35.0236,153.259,99.7851,53.4736,8759.13,5703.66,3055.47,24.7684,108.633,70.7873,37.8589,6213.07,4049.51,2164.3
-Add2_f2h,158.537,686.903,462.538,224.365,8696.58,5858.33,2838.26,112.305,486.692,327.861,158.872,6171.91,4160.96,2011.47
-Add2_h2f,41.5412,177.63,114.226,63.4042,8551.44,5498.99,3052.45,29.3753,125.993,81.1067,44.9049,6064.86,3904.13,2161.65
-Add3,21.2537,100.994,63.1299,37.8636,9509.91,5945.29,3564.62,15.0519,71.5273,44.7163,26.8142,6729.98,4208.46,2521.83
-Add3_f2h,28.3975,126.566,79.763,46.8029,8911.18,5615.63,3295.55,20.0846,89.624,56.5024,33.1257,6306.83,3975.68,2331.45
-Add3_h2f,20.9649,93.9519,58.396,35.5559,8962.73,5570.82,3391.91,14.8261,66.498,41.3441,25.1571,6342.95,3943.66,2399.6
-Add4,19.4384,96.5969,63.7597,32.8372,9942.78,6562.79,3380,13.7631,68.3882,45.1432,23.2456,7033.19,4642.48,2390.77
-Add4_f2h,76.34,366.744,245.383,121.361,9608.93,6429.36,3179.57,54.0262,259.574,173.68,85.8957,6795.77,4547.27,2248.55
-Add4_h2f,20.8326,97.3319,63.8484,33.4835,9343.97,6129.52,3214.45,14.7322,68.8549,45.1702,23.6853,6609.33,4335.85,2273.54
-Add5,19.2262,98.1635,63.363,34.8005,10213,6592.51,3620.45,13.5983,69.4422,44.8239,24.6189,7224.11,4663.35,2560.82
-Add5_f2h,14.7236,68.2983,44.2206,24.0777,9277.35,6006.75,3270.59,10.4122,48.3256,31.2902,17.0358,6563.72,4249.95,2313.82
-Add5_h2f,10.5498,48.5475,31.279,17.2686,9203.37,5929.71,3273.66,7.46111,34.3694,22.1444,12.2253,6514.35,4197.26,2317.14
-Add6,18.7741,98.9177,65.9587,32.959,10538.9,7027.44,3511.51,13.2788,70.0688,46.7239,23.3455,7463.67,4977.02,2486.72
-Add6_f2h,39.6841,197.748,132.575,65.1736,9965.93,6681.4,3284.52,28.0732,139.953,93.8285,46.1249,7049.93,4726.52,2323.44
-Add6_h2f,10.548,49.6626,33.0275,16.635,9451.94,6285.89,3166.05,7.48932,35.1512,23.3772,11.7744,6695.42,4452.75,2242.74
-Add7,0.658159,3.49495,2.27537,1.21958,11081.8,7217.49,3864.29,0.52552,3.31175,2.15602,1.15575,11595.3,7554.3,4041.16
-Add7_f2h,0.887661,1.49455,0.974188,0.52036,2879.78,1876.88,1002.9,0.714309,1.9336,1.26097,0.672641,3070.05,2000.93,1069.15
-Add7_h2f,0.140755,0.0381309,0.0248393,0.0132916,504.337,328.401,175.936,0.101868,0.096878,0.0630995,0.0337795,1295.66,843.266,452.404
-Conv1,200.956,558.231,289.217,269.014,5491.56,2830.66,2660.9,142.184,439.06,248.366,197.045,4285.98,2415.45,1937.47
-Conv1_f2h,6.64024,16.2201,8.18921,8.03089,4750.32,2365.55,2384.77,4.70953,13.1579,7.43711,5.95826,3789.64,2126.15,1744.54
-Conv1_h2f,41.8978,123.674,68.3327,55.3409,5907.39,3264.34,2643.05,29.6316,93.3619,54.0372,40.0462,4460.74,2582.14,1913.01
-Conv2,610.501,2404.06,1472.21,931.848,8000.29,4920.48,3079.81,433.644,1704.25,1048.11,658.96,5732.95,3554.54,2187.14
-Conv2_f2h,56.8745,201.057,104.016,97.0408,7053.45,3645.39,3408.06,40.2486,145.587,76.9607,69.1825,5092.26,2687.52,2424.64
-Conv2_h2f,41.8404,192.576,135.567,57.0093,9205.52,6480.36,2725.15,29.5868,136.713,96.2778,40.438,6535.05,4602.22,1932.98
-Conv3,188.912,837.371,508.274,329.097,8868.48,5383.62,3484.86,133.617,592.676,359.91,232.821,6277.65,3812.98,2465.25
-Conv3_f2h,15.6031,66.3758,39.8497,26.5262,8494.73,5098.38,3396.35,11.0572,47.1949,28.3617,18.8384,6017.1,3613.78,2403.98
-Conv3_h2f,21.0237,93.63,59.667,33.963,8906.6,5675.84,3230.76,14.8682,66.2814,42.2505,24.0337,6303.73,4018.24,2285.77
-Conv4,284.927,1346.18,857.932,488.249,9448.68,6021.77,3426.9,201.502,952.374,607.015,345.376,6683.2,4259.76,2423.56
-Conv4_f2h,28.3107,127.724,77.4371,50.2866,9022.12,5469.86,3552.26,20.0267,90.4336,54.8443,35.5938,6384.94,3872.03,2513.22
-Conv4_h2f,20.9833,102.055,69.6208,32.434,9727.36,6635.93,3091.43,14.8387,72.2015,49.2568,22.9451,6881.39,4694.6,2186.84
-Conv5,86.4203,417.756,265.595,152.161,9668.23,6146.81,3521.41,61.1232,295.494,187.871,107.626,6837.22,4347.11,2490.16
-Conv5_f2h,8.38767,36.7727,23.2235,13.5492,8765.45,5535.76,3229.69,5.9326,26.0583,16.4576,9.60107,6207.8,3920.67,2287.22
-Conv5_h2f,10.6053,48.7992,31.7579,17.0412,9202.4,5988.83,3213.57,7.5001,34.5413,22.48,12.0618,6512.52,4238.46,2274.14
-Conv6,139.892,696.675,453.631,243.043,9961.36,6486.38,3474.97,98.9321,492.663,320.792,171.874,7044.22,4587,2457.26
-Conv6_f2h,14.7838,68.8585,43.7444,25.1141,9313.71,5916.85,3396.85,10.4574,48.7457,30.9686,17.7776,6589.63,4186.51,2403.19
-Conv6_h2f,10.568,51.057,34.5719,16.4851,9662.37,6542.6,3119.77,7.47346,36.2259,24.5301,11.6961,6854.97,4641.76,2213.26
-Mul1,4.29982,22.6027,14.73,7.87274,10529.9,6862.81,3667.08,3.06206,16.1877,10.549,5.63885,7504.4,4891.15,2613.32
-Mul1_f2h,5.01057,21.4017,13.9545,7.44724,8536.09,5565.65,2970.44,3.54931,15.2502,9.94396,5.30637,6067.7,3956.33,2111.43
-Mul1_h2f,0.219266,0.10134,0.0660095,0.03533,811.894,528.683,283.211,0.163586,0.22974,0.14973,0.080012,1619.98,1054.57,565.431
-Pool1,43.674,198.246,119.456,78.7903,9078.2,5470.13,3608.07,30.8997,140.487,84.7207,55.7843,6429.35,3877.11,2553.05
-Pool1_f2h,54.8347,240.102,146.135,93.9672,8752.72,5326.68,3426.04,38.7916,170.226,103.7,66.5481,6199.29,3775.75,2424.32
-Pool1_h2f,10.6202,44.9693,27.1714,17.7978,8469.49,5117.56,3351.94,7.50988,31.8572,19.2617,12.5996,6000.37,3628.12,2373
-Pool2,22.3065,111.077,70.4439,40.6332,9958.57,6315.5,3643.07,15.7792,78.602,49.8515,28.7515,7043.82,4467.22,2576.69
-Pool2_f2h,27.6028,129.232,82.458,46.7739,9362.89,5974.06,3388.83,19.5202,91.4233,58.3375,33.0868,6622.4,4225.7,2396.77
-Pool2_h2f,5.39931,22.7498,14.4354,8.3144,8425.89,5346.44,3079.45,3.81822,16.1451,10.2449,5.9004,5978.26,3793.49,2184.86
-Pool3,12.0697,64.0871,41.917,22.1701,10618.3,6945.08,3673.23,8.53664,45.3661,29.6731,15.6935,7513.77,4914.65,2599.18
-Pool3_f2h,14.3906,68.8152,45.1258,23.6894,9563.88,6271.6,3292.28,10.1772,48.6924,31.9299,16.7629,6766.23,4437.02,2329.26
-Pool3_h2f,2.78859,10.4233,6.81839,3.60494,7474.86,4889.52,2585.34,1.97384,7.50692,4.91099,2.59601,5378.12,3518.12,1860.05
-Softmax1,2.26715,7.67319,4.99613,2.67706,6769.48,4407.84,2361.64,1.60403,5.63971,3.67227,1.96749,4973.77,3238.83,1734.98
-Tanh1,33.4466,118.003,61.6149,56.3879,7064.04,3690.25,3373.79,23.6527,85.7445,45.9758,40.1967,5137.61,2756.88,2406.17
-Tanh1_f2h,57.0977,185.926,97.1494,88.7762,6536.44,3420.5,3115.94,40.4191,135.633,73.0493,63.3674,4779.37,2580.1,2226.23
-Tanh1_h2f,41.7089,145.509,76.2065,69.3025,6978.55,3654.51,3324.04,29.4985,105.384,56.4528,49.367,5054.37,2707.22,2368.07
-Tanh2,36.2146,162.298,101.111,61.1867,8963.85,5584.56,3379.29,25.609,115.01,71.7167,43.3095,6352.26,3961.22,2391.95
-Tanh2_f2h,56.9829,242.874,153.169,89.7055,8520.54,5373.23,3147.31,40.3301,172.418,108.85,63.5934,6040.38,3813.04,2228.24
-Tanh2_h2f,41.625,182.762,113.157,69.605,8781.86,5437.35,3344.5,29.4345,129.486,80.2412,49.263,6222.04,3855.85,2367.07
-Tanh3,18.3396,88.5345,54.3058,34.2287,9654.14,5921.68,3732.46,12.9703,62.6686,38.4496,24.2216,6831.73,4191.48,2640.53
-Tanh3_f2h,28.9295,129.858,80.0577,49.7999,8969.72,5529.15,3440.56,20.4875,92.1118,56.8106,35.3056,6347.03,3913.57,2433.76
-Tanh3_h2f,20.8836,94.2318,57.6446,36.5872,9024.13,5520.31,3503.81,14.7676,66.6859,40.8043,25.8844,6385.64,3907.25,2478.66
-Tanh4,18.4717,92.4694,59.7134,32.7559,10012,6465.38,3546.6,13.0617,65.4106,42.2419,23.1693,7082.06,4573.57,2508.57
-Tanh4_f2h,28.0864,130.933,85.0439,45.8888,9321.9,6054.74,3267.16,19.8684,92.6641,60.1914,32.4736,6593.39,4282.78,2310.68
-Tanh4_h2f,20.8569,97.8308,62.9465,34.8843,9381.09,6035.98,3345.11,14.7494,69.2071,44.5324,24.6756,6635.71,4269.83,2365.97
-Tanh5,9.39612,49.9132,31.9349,17.9784,10620.9,6795.34,3825.57,6.64567,35.5193,22.7251,12.7945,7550.97,4831.1,2719.94
-Tanh5_f2h,14.9164,69.1465,44.357,24.7895,9279.12,5952.49,3326.62,10.5528,49.0071,31.4389,17.5687,6574.75,4217.83,2356.99
-Tanh5_h2f,10.5006,48.3962,30.92,17.4763,9217.58,5889.04,3328.55,7.42558,34.2539,21.8851,12.3692,6523.36,4167.82,2355.62
-Tanh6,9.37294,50.5591,33.3473,17.2118,10787.6,7115.14,3672.42,6.62805,35.791,23.607,12.1843,7635.55,5036.22,2599.38
-Tanh6_f2h,14.3859,68.827,45.5284,23.2985,9568.27,6329.29,3238.98,10.1735,48.7028,32.2173,16.4857,6769.57,4478.09,2291.52
-Tanh6_h2f,10.5241,49.2841,32.4417,16.8424,9386.9,6178.99,3207.92,7.44696,34.9422,23.0012,11.9412,6656.18,4381.49,2274.73
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp32.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp32.csv
deleted file mode 100644
index e2cbba7f6782a0d77ab4c7279a3a88f5acfb532d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_fp32.csv
+++ /dev/null
@@ -1,24 +0,0 @@
-Add1,47.2501,154.498,89.5402,64.9578,6426.98,3696.33,2730.65,33.4463,121.478,76.4718,47.0638,4993.53,3124.31,1963.93
-Add2,45.3358,217.323,149.508,67.8141,9594.62,6601.66,2992.96,32.0593,156.332,108.33,48.2216,6906.53,4787.12,2129.06
-Add3,26.3653,138.399,95.847,42.5524,10500.7,7272.43,3228.23,18.6463,98.9696,68.8605,30.2012,7508.99,5224.96,2291.01
-Add4,26.3854,150.498,109.738,40.7597,11414.1,8323.45,3090.67,18.6598,107.025,78.1688,28.8878,8120.56,5932,2190.98
-Add5,24.2343,136.978,99.389,37.5888,11306.5,8204.06,3102.4,17.1387,97.2822,70.6568,26.6431,8030.37,5832.9,2198.92
-Add6,24.3243,144.761,108.644,36.1167,11913.1,8941.05,2972.07,17.2054,102.532,76.9757,25.5618,8441.39,6337.58,2104.27
-Add7,0.256741,0.108633,0.0804227,0.0282101,837.078,619.959,217.119,0.184457,0.239262,0.17707,0.0622105,1850.45,1370.8,479.791
-Conv1,115.33,355.301,207.624,147.677,5963.71,3435.06,2528.65,81.7778,290.851,188.764,108.126,4791.55,3087.45,1825.79
-Conv2,310.308,1282.26,816.061,466.203,8263.6,5259,3004.6,219.438,941.609,613.698,332.739,6067.24,3954.22,2144.15
-Conv3,165.302,812.529,542.858,269.671,10211.6,6887.29,3324.32,119.55,575.142,383.942,192.317,7329.84,4983.3,2359.48
-Conv4,303.007,1619.75,1129.02,490.726,10843.2,7579.9,3263.26,215.314,1146.48,800.497,347.114,7743.98,5437.17,2313.98
-Conv5,156.337,882.718,630.491,252.227,11295,8067.89,3227.12,110.575,626.941,448.461,178.656,8022.03,5738.71,2285.56
-Conv6,248.321,1443.4,1054.07,389.33,11626.8,8490.88,3135.95,175.601,1023.01,747.582,275.556,8241.2,6022.64,2219.57
-Mul1,2.20702,9.19011,6.8038,2.38631,8310.13,6155.01,2155.12,1.56606,6.74557,4.99224,1.75375,6073,4498.52,1574.87
-Pool1,39.1015,194.307,129.484,64.8227,9955.51,6637,3318.52,27.6862,139.26,93.4326,46.0372,7137.28,4792.17,2355.71
-Pool2,45.8717,263.305,188.721,74.5842,11476,8224.81,3251.15,32.4473,187.287,134.477,52.87,8157.05,5856.35,2303.32
-Pool3,23.8305,140.946,104.705,36.2403,11829.1,8787.6,3041.49,16.856,99.855,74.2068,25.6543,8377.95,6226.11,2152.36
-Softmax1,2.09044,8.55352,6.33795,2.21557,8172.34,6055.47,2116.87,1.4798,6.29816,4.66796,1.63058,6001.96,4448.28,1554.05
-Tanh1,39.4831,135.034,76.518,58.5163,6760.26,3809.4,2950.86,27.9698,104.297,63.7634,42.2063,5167.76,3142.26,2115.25
-Tanh2,38.504,188.321,126.889,61.4314,9782.72,6591.66,3191.06,27.2277,135.275,91.8445,43.6415,7027.36,4771.38,2266.95
-Tanh3,19.3912,101.904,69.9865,31.9174,10512.4,7220.09,3292.33,13.7121,72.8245,50.2415,22.6485,7513.77,5184.09,2336.42
-Tanh4,19.2511,109.512,79.122,30.3901,11374.4,8217.68,3156.73,13.6147,77.9259,56.4047,21.5465,8090.47,5855.69,2237.41
-Tanh5,9.61697,52.0084,37.5149,14.4935,10817.2,7802.82,3014.35,6.80077,36.934,26.6693,10.2721,7682.21,5547.37,2136.37
-Tanh6,9.63435,54.9648,40.9982,13.9666,11410.6,8511.23,2899.37,6.81383,38.9614,29.0708,9.8929,8087.16,6034.3,2053.33
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_layers.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_layers.txt
deleted file mode 100644
index 98dfa6fa380a34ee7ff5ce0615656deab585ac5b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_layers.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-Conv1,2000,3,32,32,32,3,3,3,1,1
-Conv2,2000,32,32,32,32,32,3,3,1,1
-Conv3,2000,32,16,16,64,32,3,3,1,1
-Conv4,2000,64,16,16,64,64,3,3,1,1
-Conv5,2000,64,8,8,128,64,3,3,1,1
-Conv6,2000,128,8,8,128,128,3,3,1,1
-FC1,2000,2048,2048,10
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_ops.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_ops.txt
deleted file mode 100644
index 7a26ba6faa8bf17a0a2067c8a80f69a514ad07c0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_ops.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-#Conv1,3
-Conv1
-Add1
-Tanh1
-#Conv2,4
-Conv2
-Add2
-Tanh2
-Pool1
-#Conv3,3
-Conv3
-Add3
-Tanh3
-#Conv4,4
-Conv4
-Add4
-Tanh4
-Pool2
-#Conv5,3
-Conv5
-Add5
-Tanh5
-#Conv6,4
-Conv6
-Add6
-Tanh6
-Pool3
-#FC1,2
-Mul1
-Add7
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs1.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs1.txt
deleted file mode 100644
index c9e13831df88fd375fd8439e20106a483a8342bc..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs1.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9
-9 9 9,7,8 8 8,6,8 8 8,4,7
-9 9 9,7,8 8 8,6,8 8 8,5,7
-9 9 9,7,4,5,8 8 8,5,7
-9 9 9,7,4,6,8 8 8,6,7
-9 9 9,5,7,6,8 8 8,8 8 8 8,7
-9 9 9,7,7,6,8 8 8,6,7
-9 9 9,6,8 8 8,7,8 8 8,7,5
-9 9 9,9 9 9 9,7,6,8 8 8,5,6
-9 9 9,5,8 8 8,4,8 8 8,7,6
-9 9 9,8 8 8 8,7,6,8 8 8,5,7
-9 9 9,7,7,4,8 8 8,6,7
-8 8 8,8 8 8 8,8 8 8,6,8 8 8,5,7
-9 9 9,7,7,6,8 8 8,8 8 8 8,7
-8 8 8,6,4,6,8 8 8,8 8 8 8,7
-8 8 8,9 9 9 9,5,5,8 8 8,5,7
-9 9 9,7,5,6,8 8 8,8 8 8 8,7
-9 9 9,6,8 8 8,8 8 8 8,8 8 8,7,5
-8 8 8,9 9 9 9,4,4,8 8 8,8 8 8 8,4
-8 8 8,8 8 8 8,6,6,8 8 8,8 8 8 8,7
-8 8 8,7,8 8 8,5,8 8 8,8 8 8 8,7
-9 9 9,8 8 8 8,7,6,8 8 8,8 8 8 8,7
-9 9 9,8 8 8 8,5,4,8 8 8,8 8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs2.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs2.txt
deleted file mode 100644
index 9188c561894dd811fa5adc5606c61afaf3972a1f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_confs2.txt
+++ /dev/null
@@ -1,37 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9
-9 9 9,7,7,8 8 8 8,8 8 8,5,8 8
-8 8 8,7,7,8 8 8 8,8 8 8,5,4
-8 8 8,6,7,8 8 8 8,8 8 8,7,7
-8 8 8,6,5,6,8 8 8,5,6
-8 8 8,7,7,6,8 8 8,4,6
-8 8 8,7,7,7,8 8 8,5,7
-8 8 8,7,6,6,8 8 8,5,4
-8 8 8,7,5,7,8 8 8,5,7
-8 8 8,7,8 8 8,6,8 8 8,4,7
-9 9 9,7,8 8 8,6,8 8 8,5,7
-8 8 8,7,7,5,8 8 8,5,7
-9 9 9,7,4,5,8 8 8,5,7
-8 8 8,7,4,6,8 8 8,6,7
-8 8 8,7,7,6,8 8 8,5,7
-8 8 8,5,7,6,8 8 8,8 8 8 8,7
-8 8 8,7,7,6,8 8 8,6,7
-8 8 8,9 9 9 9,7,6,8 8 8,4,8 8
-9 9 9,6,8 8 8,7,8 8 8,7,5
-8 8 8,7,7,6,8 8 8,8 8 8 8,8 8
-9 9 9,9 9 9 9,7,6,8 8 8,5,6
-8 8 8,8 8 8 8,8 8 8,6,8 8 8,5,8 8
-8 8 8,8 8 8 8,7,6,8 8 8,5,8 8
-8 8 8,5,8 8 8,4,8 8 8,7,6
-9 9 9,8 8 8 8,7,6,8 8 8,5,7
-8 8 8,7,7,4,8 8 8,6,7
-8 8 8,8 8 8 8,8 8 8,6,8 8 8,5,7
-9 9 9,7,7,6,8 8 8,8 8 8 8,7
-8 8 8,6,4,6,8 8 8,8 8 8 8,7
-8 8 8,9 9 9 9,5,5,8 8 8,5,7
-9 9 9,7,5,6,8 8 8,8 8 8 8,7
-9 9 9,6,8 8 8,8 8 8 8,8 8 8,7,5
-8 8 8,9 9 9 9,4,4,8 8 8,8 8 8 8,4
-8 8 8,8 8 8 8,6,6,8 8 8,8 8 8 8,7
-8 8 8,7,8 8 8,5,8 8 8,8 8 8 8,7
-8 8 8,8 8 8 8,7,6,8 8 8,8 8 8 8,7
-9 9 9,8 8 8 8,5,4,8 8 8,8 8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results1.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results1.csv
deleted file mode 100644
index 038318438fafe7dffdde74d6066e2bcb67cc686f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results1.csv
+++ /dev/null
@@ -1,297 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,52.371161,1026.8995,32.782291,565.8327,17.406593,0.087285,2340.21253,3.66975882144521
-c2,644.833,52.371161,1026.8995,32.782291,565.8327,19.420124,0.087285,2342.226061,3.6666040564304
-c3,644.833,52.371161,11.604395,21.577915,565.8327,19.420124,0.087285,1315.72658,6.52720361572359
-c4,644.833,52.371161,11.604395,32.782291,565.8327,29.504062,0.087285,1337.014894,6.4232757160837
-c5,644.833,25.893498,26.185581,32.782291,565.8327,910.2389,0.087285,2205.853255,3.89328507424738
-c6,644.833,52.371161,26.185581,32.782291,565.8327,29.504062,0.087285,1351.59608,6.3539806268171
-c7,644.833,39.338749,1026.8995,43.642634,565.8327,39.278371,0.043156,2359.86811,3.6391930306142
-c8,644.833,1882.211,26.185581,32.782291,565.8327,19.420124,0.065565,3171.330261,2.70801681483981
-c9,644.833,25.893498,1026.8995,19.340658,565.8327,39.278371,0.065565,2322.143292,3.69831422666943
-c10,644.833,2917.863,26.185581,32.782291,565.8327,19.420124,0.087285,4207.003981,2.04136144811123
-c11,644.833,52.371161,26.185581,19.340658,565.8327,29.504062,0.087285,1338.154447,6.41780574766451
-c12,793.086,2917.863,1026.8995,32.782291,565.8327,19.420124,0.087285,5355.9709,1.60344705805913
-c13,644.833,52.371161,26.185581,32.782291,565.8327,910.2389,0.087285,2232.330918,3.84710684649905
-c14,793.086,39.338749,11.604395,32.782291,565.8327,910.2389,0.087285,2352.97032,3.64986141347242
-c15,793.086,1882.211,12.946749,21.577915,565.8327,19.420124,0.087285,3295.161773,2.60625009453064
-c16,644.833,52.371161,12.946749,32.782291,565.8327,910.2389,0.087285,2219.092086,3.87005821442697
-c17,644.833,39.338749,1026.8995,1646.3233,565.8327,39.278371,0.043156,3962.548776,2.1672959026487
-c18,793.086,1882.211,11.604395,19.340658,565.8327,910.2389,0.038681,4182.352334,2.0533936530993
-c19,793.086,2917.863,19.669375,32.782291,565.8327,910.2389,0.087285,5239.559551,1.63907208144122
-c20,793.086,52.371161,1026.8995,21.577915,565.8327,910.2389,0.087285,3370.093461,2.54830193511058
-c21,644.833,2917.863,26.185581,32.782291,565.8327,910.2389,0.087285,5097.822757,1.68464385364185
-c22,644.833,2917.863,12.946749,19.340658,565.8327,910.2389,26.09765,5097.152657,1.68486532627572
-c3,1315.72658
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c2,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c3,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,401.406984,4.33387563484056
-c4,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,401.406984,4.33387563484056
-c5,202.0632,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,560.756684,3.10232248746187
-c6,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,401.406984,4.33387563484056
-c7,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c8,202.0632,433.2493,13.608704,22.719616,115.04262,20.759040,0.054060,807.49654,2.1543722844469
-c9,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c10,202.0632,725.4132,13.608704,22.719616,115.04262,20.759040,0.054060,1099.66044,1.58198672928648
-c11,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,401.406984,4.33387563484056
-c12,270.6243,725.4132,228.5053,22.719616,115.04262,20.759040,0.054060,1383.118136,1.25777271654742
-c13,202.0632,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,560.756684,3.10232248746187
-c14,270.6243,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,629.317784,2.76433965286771
-c15,270.6243,433.2493,13.608704,22.719616,115.04262,20.759040,0.054060,876.05764,1.98576908983191
-c16,202.0632,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,560.756684,3.10232248746187
-c17,202.0632,27.159744,228.5053,345.1436,115.04262,20.759040,0.054060,938.727564,1.85319816142118
-c18,270.6243,433.2493,13.608704,22.719616,115.04262,180.10874,0.054060,1035.40734,1.68015827759553
-c19,270.6243,725.4132,13.608704,22.719616,115.04262,180.10874,0.054060,1327.57124,1.31039916920774
-c20,270.6243,27.159744,228.5053,22.719616,115.04262,180.10874,0.054060,844.21438,2.06067109983709
-c21,202.0632,725.4132,13.608704,22.719616,115.04262,180.10874,0.054060,1259.01014,1.38175872262961
-c22,202.0632,725.4132,13.608704,22.719616,115.04262,180.10874,4.957979,1263.914059,1.37639757305702
-c3,401.406984
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,168.647541,1026.8995,107.363804,565.8327,72.035782,2.304538,2587.916865,3.31850521448241
-c2,644.833,168.647541,1026.8995,107.363804,565.8327,74.049313,2.304538,2589.930396,3.31592525601119
-c3,644.833,168.647541,61.067218,96.159428,565.8327,74.049313,2.304538,1612.893738,5.32460087617992
-c4,644.833,168.647541,61.067218,107.363804,565.8327,84.133251,2.304538,1634.182052,5.25523787693405
-c5,644.833,142.169878,75.648404,107.363804,565.8327,910.2389,2.304538,2448.391224,3.50761573888015
-c6,644.833,168.647541,75.648404,107.363804,565.8327,84.133251,2.304538,1648.763238,5.20876207340801
-c7,644.833,155.615129,1026.8995,118.224147,565.8327,93.90756,2.260409,2607.572445,3.29349070631513
-c8,644.833,1882.211,75.648404,107.363804,565.8327,74.049313,2.282818,3352.221039,2.56188824868573
-c9,644.833,142.169878,1026.8995,93.922171,565.8327,93.90756,2.282818,2569.847627,3.34183844932537
-c10,644.833,3118.92,75.648404,107.363804,565.8327,74.049313,2.304538,4588.951759,1.87145479117566
-c11,644.833,168.647541,75.648404,93.922171,565.8327,84.133251,2.304538,1635.321605,5.25157583168016
-c12,809.3061,2917.863,1026.8995,107.363804,565.8327,74.049313,2.304538,5503.618955,1.56043066519981
-c13,644.833,168.647541,75.648404,107.363804,565.8327,910.2389,2.304538,2474.868887,3.47008911910536
-c14,809.3061,155.615129,61.067218,107.363804,565.8327,910.2389,2.304538,2611.728389,3.2882499000837
-c15,809.3061,2074.787,62.409572,96.159428,565.8327,74.049313,2.304538,3684.848651,2.33062915829839
-c16,644.833,168.647541,62.409572,107.363804,565.8327,910.2389,2.304538,2461.630055,3.48875151921431
-c17,644.833,155.615129,1026.8995,1646.3233,565.8327,93.90756,2.260409,4135.671598,2.07657100711189
-c18,809.3061,2074.787,61.067218,93.922171,565.8327,910.2389,2.255934,4517.410023,1.90109281848793
-c19,809.3061,2917.863,69.132198,107.363804,565.8327,910.2389,2.304538,5382.04124,1.59568004042867
-c20,809.3061,168.647541,1026.8995,96.159428,565.8327,910.2389,2.304538,3579.388707,2.3992967531789
-c21,644.833,3118.92,75.648404,107.363804,565.8327,910.2389,2.304538,5425.141346,1.58300313982265
-c22,644.833,3118.92,62.409572,93.922171,565.8327,910.2389,26.09765,5422.253993,1.58384608978154
-c3,1612.893738
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c2,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c3,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c4,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c5,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c6,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c7,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c8,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c9,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c10,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c11,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c12,0,0,0,47.189800,0,39.390943,0.559103,87.139846,0
-c13,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c14,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c15,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c16,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c17,0,63.122342,0,0,0,39.390943,0.559103,103.072388,0
-c18,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c19,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c20,0,63.122342,0,47.189800,0,0,0.559103,110.871245,0
-c21,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c22,0,0,29.446347,47.189800,0,0,0,76.636147,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c2,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c3,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c4,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c5,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c6,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c7,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c8,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c9,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c10,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c11,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c12,0,0,0,27.391713,0,15.238246,1.658150,44.288109,0
-c13,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c14,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c15,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c16,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c17,0,53.154038,0,0,0,15.238246,1.658150,70.050434,0
-c18,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c19,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c20,0,53.154038,0,27.391713,0,0,1.658150,82.203901,0
-c21,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c22,0,0,20.016476,27.391713,0,0,0,47.408189,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c2,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c3,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c4,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c5,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c6,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c7,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c8,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c9,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c10,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c11,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c12,0,0,0,8.244707,0,5.486035,0.351348,14.08209,0
-c13,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c14,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c15,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c16,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c17,0,14.128858,0,0,0,5.486035,0.351348,19.966241,0
-c18,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c19,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c20,0,14.128858,0,8.244707,0,0,0.351348,22.724913,0
-c21,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c22,0,0,5.690699,8.244707,0,0,0,13.935406,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,201.057,0,0,0,0,0,201.057,0
-c11,0,0,0,0,0,0,0,0,0
-c12,16.2201,0,0,0,0,0,0,16.2201,0
-c13,0,0,0,0,0,0,0,0,0
-c14,16.2201,0,0,0,0,0,0,16.2201,0
-c15,16.2201,192.576,0,0,0,0,0,208.7961,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,16.2201,192.576,0,0,0,0,0,208.7961,0
-c19,16.2201,0,0,0,0,0,0,16.2201,0
-c20,16.2201,0,0,0,0,0,0,16.2201,0
-c21,0,201.057,0,0,0,0,0,201.057,0
-c22,0,201.057,0,0,0,0,0,201.057,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,56.8745,0,0,0,0,0,56.8745,0
-c11,0,0,0,0,0,0,0,0,0
-c12,6.64024,0,0,0,0,0,0,6.64024,0
-c13,0,0,0,0,0,0,0,0,0
-c14,6.64024,0,0,0,0,0,0,6.64024,0
-c15,6.64024,41.8404,0,0,0,0,0,48.48064,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,6.64024,41.8404,0,0,0,0,0,48.48064,0
-c19,6.64024,0,0,0,0,0,0,6.64024,0
-c20,6.64024,0,0,0,0,0,0,6.64024,0
-c21,0,56.8745,0,0,0,0,0,56.8745,0
-c22,0,56.8745,0,0,0,0,0,56.8745,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c2,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c3,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,435.308631,3.996355361409
-c4,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,435.308631,3.996355361409
-c5,202.0632,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,589.172296,2.9526983830383
-c6,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,435.308631,3.996355361409
-c7,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c8,202.0632,433.2493,19.299403,30.964323,115.04262,26.245075,0.405408,827.269329,2.10288005336167
-c9,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c10,202.0632,782.2877,19.299403,30.964323,115.04262,26.245075,0.405408,1176.307729,1.47890572357995
-c11,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,435.308631,3.996355361409
-c12,277.26454,725.4132,228.5053,30.964323,115.04262,26.245075,0.405408,1403.840466,1.23920651898301
-c13,202.0632,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,589.172296,2.9526983830383
-c14,277.26454,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,664.373636,2.61847855617218
-c15,277.26454,475.0897,19.299403,30.964323,115.04262,26.245075,0.405408,944.311069,1.84224060681421
-c16,202.0632,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,589.172296,2.9526983830383
-c17,202.0632,41.288602,228.5053,345.1436,115.04262,26.245075,0.405408,958.693805,1.81460252529716
-c18,277.26454,475.0897,19.299403,30.964323,115.04262,180.10874,0.405408,1098.174734,1.58412697790887
-c19,277.26454,725.4132,19.299403,30.964323,115.04262,180.10874,0.405408,1348.498234,1.2900634262111
-c20,277.26454,41.288602,228.5053,30.964323,115.04262,180.10874,0.405408,873.579533,1.99140217478033
-c21,202.0632,782.2877,19.299403,30.964323,115.04262,180.10874,0.405408,1330.171394,1.30783766517853
-c22,202.0632,782.2877,19.299403,30.964323,115.04262,180.10874,4.957979,1334.723965,1.30337680020776
-c3,435.308631
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results2.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results2.csv
deleted file mode 100644
index a46a2216abbef609554dea8b8df6f572e60dff2e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_promise_results2.csv
+++ /dev/null
@@ -1,451 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,52.371161,26.185581,1646.3233,565.8327,19.420124,26.09765,2981.063516,2.88085631480868
-c2,793.086,52.371161,26.185581,1646.3233,565.8327,19.420124,0.038681,3103.257547,2.76741956998068
-c3,793.086,39.338749,26.185581,1646.3233,565.8327,39.278371,0.087285,3110.131986,2.7613026410223
-c4,793.086,39.338749,12.946749,32.782291,565.8327,19.420124,0.065565,1463.472178,5.86824641101948
-c5,793.086,52.371161,26.185581,32.782291,565.8327,17.406593,0.065565,1487.729891,5.77256356661025
-c6,793.086,52.371161,26.185581,43.642634,565.8327,19.420124,0.087285,1500.625485,5.72295716456147
-c7,793.086,52.371161,19.669375,32.782291,565.8327,19.420124,0.038681,1483.200332,5.79019244986304
-c8,793.086,52.371161,12.946749,43.642634,565.8327,19.420124,0.087285,1487.386653,5.77389567688317
-c9,793.086,52.371161,1026.8995,32.782291,565.8327,17.406593,0.087285,2488.46553,3.451129016799
-c10,644.833,52.371161,1026.8995,32.782291,565.8327,19.420124,0.087285,2342.226061,3.6666040564304
-c11,793.086,52.371161,26.185581,21.577915,565.8327,19.420124,0.087285,1478.560766,5.80836145503664
-c12,644.833,52.371161,11.604395,21.577915,565.8327,19.420124,0.087285,1315.72658,6.52720361572359
-c13,793.086,52.371161,11.604395,32.782291,565.8327,29.504062,0.087285,1485.267894,5.78213223316788
-c14,793.086,52.371161,26.185581,32.782291,565.8327,19.420124,0.087285,1489.765142,5.76467734706352
-c15,793.086,25.893498,26.185581,32.782291,565.8327,910.2389,0.087285,2354.106255,3.64810023334736
-c16,793.086,52.371161,26.185581,32.782291,565.8327,29.504062,0.087285,1499.84908,5.72591968413784
-c17,793.086,1882.211,26.185581,32.782291,565.8327,17.406593,26.09765,3343.601815,2.56849235086049
-c18,644.833,39.338749,1026.8995,43.642634,565.8327,39.278371,0.043156,2359.86811,3.6391930306142
-c19,793.086,52.371161,26.185581,32.782291,565.8327,910.2389,26.09765,2406.594283,3.56853485725101
-c20,644.833,1882.211,26.185581,32.782291,565.8327,19.420124,0.065565,3171.330261,2.70801681483981
-c21,793.086,2917.863,1026.8995,32.782291,565.8327,19.420124,26.09765,5381.981265,1.59569782215327
-c22,793.086,2917.863,26.185581,32.782291,565.8327,19.420124,26.09765,4381.267346,1.96016701761511
-c23,793.086,25.893498,1026.8995,19.340658,565.8327,39.278371,0.065565,2470.396292,3.47637163445145
-c24,644.833,2917.863,26.185581,32.782291,565.8327,19.420124,0.087285,4207.003981,2.04136144811123
-c25,793.086,52.371161,26.185581,19.340658,565.8327,29.504062,0.087285,1486.407447,5.77769936672691
-c26,793.086,2917.863,1026.8995,32.782291,565.8327,19.420124,0.087285,5355.9709,1.60344705805913
-c27,644.833,52.371161,26.185581,32.782291,565.8327,910.2389,0.087285,2232.330918,3.84710684649905
-c28,793.086,39.338749,11.604395,32.782291,565.8327,910.2389,0.087285,2352.97032,3.64986141347242
-c29,793.086,1882.211,12.946749,21.577915,565.8327,19.420124,0.087285,3295.161773,2.60625009453064
-c30,644.833,52.371161,12.946749,32.782291,565.8327,910.2389,0.087285,2219.092086,3.87005821442697
-c31,644.833,39.338749,1026.8995,1646.3233,565.8327,39.278371,0.043156,3962.548776,2.1672959026487
-c32,793.086,1882.211,11.604395,19.340658,565.8327,910.2389,0.038681,4182.352334,2.0533936530993
-c33,793.086,2917.863,19.669375,32.782291,565.8327,910.2389,0.087285,5239.559551,1.63907208144122
-c34,793.086,52.371161,1026.8995,21.577915,565.8327,910.2389,0.087285,3370.093461,2.54830193511058
-c35,793.086,2917.863,26.185581,32.782291,565.8327,910.2389,0.087285,5246.075757,1.63703617276916
-c36,644.833,2917.863,12.946749,19.340658,565.8327,910.2389,26.09765,5097.152657,1.68486532627572
-c12,1315.72658
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,27.159744,13.608704,345.1436,115.04262,20.759040,4.957979,728.734887,2.38721676883067
-c2,270.6243,27.159744,13.608704,345.1436,115.04262,20.759040,0.054060,792.392068,2.19543863664236
-c3,270.6243,27.159744,13.608704,345.1436,115.04262,20.759040,0.054060,792.392068,2.19543863664236
-c4,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c5,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c6,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c7,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c8,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c9,270.6243,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,684.86468,2.5401340991721
-c10,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c11,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c12,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,401.406984,4.33387563484056
-c13,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c14,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c15,270.6243,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,629.317784,2.76433965286771
-c16,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c17,270.6243,433.2493,13.608704,22.719616,115.04262,20.759040,4.957979,880.961559,1.97471520267376
-c18,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,616.30358,2.82271295378278
-c19,270.6243,27.159744,13.608704,22.719616,115.04262,180.10874,4.957979,634.221703,2.74296527298668
-c20,202.0632,433.2493,13.608704,22.719616,115.04262,20.759040,0.054060,807.49654,2.1543722844469
-c21,270.6243,725.4132,228.5053,22.719616,115.04262,20.759040,4.957979,1388.022055,1.25332897225981
-c22,270.6243,725.4132,13.608704,22.719616,115.04262,20.759040,4.957979,1173.125459,1.48291746578509
-c23,270.6243,27.159744,228.5053,22.719616,115.04262,20.759040,0.054060,684.86468,2.5401340991721
-c24,202.0632,725.4132,13.608704,22.719616,115.04262,20.759040,0.054060,1099.66044,1.58198672928648
-c25,270.6243,27.159744,13.608704,22.719616,115.04262,20.759040,0.054060,469.968084,3.70163011077363
-c26,270.6243,725.4132,228.5053,22.719616,115.04262,20.759040,0.054060,1383.118136,1.25777271654742
-c27,202.0632,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,560.756684,3.10232248746187
-c28,270.6243,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,629.317784,2.76433965286771
-c29,270.6243,433.2493,13.608704,22.719616,115.04262,20.759040,0.054060,876.05764,1.98576908983191
-c30,202.0632,27.159744,13.608704,22.719616,115.04262,180.10874,0.054060,560.756684,3.10232248746187
-c31,202.0632,27.159744,228.5053,345.1436,115.04262,20.759040,0.054060,938.727564,1.85319816142118
-c32,270.6243,433.2493,13.608704,22.719616,115.04262,180.10874,0.054060,1035.40734,1.68015827759553
-c33,270.6243,725.4132,13.608704,22.719616,115.04262,180.10874,0.054060,1327.57124,1.31039916920774
-c34,270.6243,27.159744,228.5053,22.719616,115.04262,180.10874,0.054060,844.21438,2.06067109983709
-c35,270.6243,725.4132,13.608704,22.719616,115.04262,180.10874,0.054060,1327.57124,1.31039916920774
-c36,202.0632,725.4132,13.608704,22.719616,115.04262,180.10874,4.957979,1263.914059,1.37639757305702
-c12,401.406984
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,168.647541,75.648404,1646.3233,565.8327,74.049313,26.09765,3201.431908,2.6825545323279
-c2,809.3061,168.647541,75.648404,1646.3233,565.8327,74.049313,2.255934,3342.063292,2.56967475947865
-c3,809.3061,155.615129,75.648404,1646.3233,565.8327,93.90756,2.304538,3348.937731,2.56439993107773
-c4,809.3061,155.615129,62.409572,107.363804,565.8327,74.049313,2.282818,1776.859436,4.83325539751613
-c5,809.3061,168.647541,75.648404,107.363804,565.8327,72.035782,2.282818,1801.117149,4.76816040031162
-c6,809.3061,168.647541,75.648404,118.224147,565.8327,74.049313,2.304538,1814.012743,4.73426413497559
-c7,809.3061,168.647541,69.132198,107.363804,565.8327,74.049313,2.255934,1796.58759,4.78018189192869
-c8,809.3061,168.647541,62.409572,118.224147,565.8327,74.049313,2.304538,1800.773911,4.76906923941607
-c9,809.3061,168.647541,1026.8995,107.363804,565.8327,72.035782,2.304538,2752.389965,3.12020307448683
-c10,644.833,168.647541,1026.8995,107.363804,565.8327,74.049313,2.304538,2589.930396,3.31592525601119
-c11,809.3061,168.647541,75.648404,96.159428,565.8327,74.049313,2.304538,1791.948024,4.79255834919471
-c12,644.833,168.647541,61.067218,96.159428,565.8327,74.049313,2.304538,1612.893738,5.32460087617992
-c13,809.3061,168.647541,61.067218,107.363804,565.8327,84.133251,2.304538,1798.655152,4.77468705214667
-c14,809.3061,168.647541,75.648404,107.363804,565.8327,74.049313,2.304538,1803.1524,4.76277849100395
-c15,809.3061,142.169878,75.648404,107.363804,565.8327,910.2389,2.304538,2612.864324,3.28682034326631
-c16,809.3061,168.647541,75.648404,107.363804,565.8327,84.133251,2.304538,1813.236338,4.73629128723697
-c17,809.3061,2074.787,75.648404,107.363804,565.8327,72.035782,26.09765,3731.07144,2.30175590334566
-c18,644.833,155.615129,1026.8995,118.224147,565.8327,93.90756,2.260409,2607.572445,3.29349070631513
-c19,809.3061,168.647541,75.648404,107.363804,565.8327,910.2389,26.09765,2663.135099,3.22477655142134
-c20,644.833,1882.211,75.648404,107.363804,565.8327,74.049313,2.282818,3352.221039,2.56188824868573
-c21,809.3061,2917.863,1026.8995,107.363804,565.8327,74.049313,26.09765,5527.412067,1.55371368798451
-c22,809.3061,2917.863,75.648404,107.363804,565.8327,74.049313,26.09765,4576.160971,1.87668567818206
-c23,809.3061,142.169878,1026.8995,93.922171,565.8327,93.90756,2.282818,2734.320727,3.14082234176685
-c24,644.833,3118.92,75.648404,107.363804,565.8327,74.049313,2.304538,4588.951759,1.87145479117566
-c25,809.3061,168.647541,75.648404,93.922171,565.8327,84.133251,2.304538,1799.794705,4.77166392476613
-c26,809.3061,2917.863,1026.8995,107.363804,565.8327,74.049313,2.304538,5503.618955,1.56043066519981
-c27,644.833,168.647541,75.648404,107.363804,565.8327,910.2389,2.304538,2474.868887,3.47008911910536
-c28,809.3061,155.615129,61.067218,107.363804,565.8327,910.2389,2.304538,2611.728389,3.2882499000837
-c29,809.3061,2074.787,62.409572,96.159428,565.8327,74.049313,2.304538,3684.848651,2.33062915829839
-c30,644.833,168.647541,62.409572,107.363804,565.8327,910.2389,2.304538,2461.630055,3.48875151921431
-c31,644.833,155.615129,1026.8995,1646.3233,565.8327,93.90756,2.260409,4135.671598,2.07657100711189
-c32,809.3061,2074.787,61.067218,93.922171,565.8327,910.2389,2.255934,4517.410023,1.90109281848793
-c33,809.3061,2917.863,69.132198,107.363804,565.8327,910.2389,2.304538,5382.04124,1.59568004042867
-c34,809.3061,168.647541,1026.8995,96.159428,565.8327,910.2389,2.304538,3579.388707,2.3992967531789
-c35,809.3061,2917.863,75.648404,107.363804,565.8327,910.2389,2.304538,5388.557446,1.5937504368632
-c36,644.833,3118.92,62.409572,93.922171,565.8327,910.2389,26.09765,5422.253993,1.58384608978154
-c12,1612.893738
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,63.122342,29.446347,0,0,39.390943,0,131.959632,0
-c2,0,63.122342,29.446347,0,0,39.390943,0.559103,132.518735,0
-c3,0,63.122342,29.446347,0,0,39.390943,0.559103,132.518735,0
-c4,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c5,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c6,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c7,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c8,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c9,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c10,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c11,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c12,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c13,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c14,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c15,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c16,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c17,0,0,29.446347,47.189800,0,39.390943,0,116.02709,0
-c18,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c19,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c20,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c21,0,0,0,47.189800,0,39.390943,0,86.580743,0
-c22,0,0,29.446347,47.189800,0,39.390943,0,116.02709,0
-c23,0,63.122342,0,47.189800,0,39.390943,0.559103,150.262188,0
-c24,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c25,0,63.122342,29.446347,47.189800,0,39.390943,0.559103,179.708535,0
-c26,0,0,0,47.189800,0,39.390943,0.559103,87.139846,0
-c27,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c28,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c29,0,0,29.446347,47.189800,0,39.390943,0.559103,116.586193,0
-c30,0,63.122342,29.446347,47.189800,0,0,0.559103,140.317592,0
-c31,0,63.122342,0,0,0,39.390943,0.559103,103.072388,0
-c32,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c33,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c34,0,63.122342,0,47.189800,0,0,0.559103,110.871245,0
-c35,0,0,29.446347,47.189800,0,0,0.559103,77.19525,0
-c36,0,0,29.446347,47.189800,0,0,0,76.636147,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,53.154038,20.016476,0,0,15.238246,0,88.40876,0
-c2,0,53.154038,20.016476,0,0,15.238246,1.658150,90.06691,0
-c3,0,53.154038,20.016476,0,0,15.238246,1.658150,90.06691,0
-c4,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c5,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c6,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c7,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c8,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c9,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c10,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c11,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c12,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c13,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c14,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c15,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c16,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c17,0,0,20.016476,27.391713,0,15.238246,0,62.646435,0
-c18,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c19,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c20,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c21,0,0,0,27.391713,0,15.238246,0,42.629959,0
-c22,0,0,20.016476,27.391713,0,15.238246,0,62.646435,0
-c23,0,53.154038,0,27.391713,0,15.238246,1.658150,97.442147,0
-c24,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c25,0,53.154038,20.016476,27.391713,0,15.238246,1.658150,117.458623,0
-c26,0,0,0,27.391713,0,15.238246,1.658150,44.288109,0
-c27,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c28,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c29,0,0,20.016476,27.391713,0,15.238246,1.658150,64.304585,0
-c30,0,53.154038,20.016476,27.391713,0,0,1.658150,102.220377,0
-c31,0,53.154038,0,0,0,15.238246,1.658150,70.050434,0
-c32,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c33,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c34,0,53.154038,0,27.391713,0,0,1.658150,82.203901,0
-c35,0,0,20.016476,27.391713,0,0,1.658150,49.066339,0
-c36,0,0,20.016476,27.391713,0,0,0,47.408189,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,14.128858,5.690699,0,0,5.486035,0,25.305592,0
-c2,0,14.128858,5.690699,0,0,5.486035,0.351348,25.65694,0
-c3,0,14.128858,5.690699,0,0,5.486035,0.351348,25.65694,0
-c4,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c5,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c6,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c7,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c8,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c9,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c10,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c11,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c12,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c13,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c14,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c15,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c16,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c17,0,0,5.690699,8.244707,0,5.486035,0,19.421441,0
-c18,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c19,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c20,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c21,0,0,0,8.244707,0,5.486035,0,13.730742,0
-c22,0,0,5.690699,8.244707,0,5.486035,0,19.421441,0
-c23,0,14.128858,0,8.244707,0,5.486035,0.351348,28.210948,0
-c24,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c25,0,14.128858,5.690699,8.244707,0,5.486035,0.351348,33.901647,0
-c26,0,0,0,8.244707,0,5.486035,0.351348,14.08209,0
-c27,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c28,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c29,0,0,5.690699,8.244707,0,5.486035,0.351348,19.772789,0
-c30,0,14.128858,5.690699,8.244707,0,0,0.351348,28.415612,0
-c31,0,14.128858,0,0,0,5.486035,0.351348,19.966241,0
-c32,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c33,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c34,0,14.128858,0,8.244707,0,0,0.351348,22.724913,0
-c35,0,0,5.690699,8.244707,0,0,0.351348,14.286754,0
-c36,0,0,5.690699,8.244707,0,0,0,13.935406,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,16.2201,0,0,0,0,0,0,16.2201,0
-c3,16.2201,0,0,0,0,0,0,16.2201,0
-c4,16.2201,0,0,0,0,0,0,16.2201,0
-c5,16.2201,0,0,0,0,0,0,16.2201,0
-c6,16.2201,0,0,0,0,0,0,16.2201,0
-c7,16.2201,0,0,0,0,0,0,16.2201,0
-c8,16.2201,0,0,0,0,0,0,16.2201,0
-c9,16.2201,0,0,0,0,0,0,16.2201,0
-c10,0,0,0,0,0,0,0,0,0
-c11,16.2201,0,0,0,0,0,0,16.2201,0
-c12,0,0,0,0,0,0,0,0,0
-c13,16.2201,0,0,0,0,0,0,16.2201,0
-c14,16.2201,0,0,0,0,0,0,16.2201,0
-c15,16.2201,0,0,0,0,0,0,16.2201,0
-c16,16.2201,0,0,0,0,0,0,16.2201,0
-c17,16.2201,192.576,0,0,0,0,0,208.7961,0
-c18,0,0,0,0,0,0,0,0,0
-c19,16.2201,0,0,0,0,0,0,16.2201,0
-c20,0,0,0,0,0,0,0,0,0
-c21,16.2201,0,0,0,0,0,0,16.2201,0
-c22,16.2201,0,0,0,0,0,0,16.2201,0
-c23,16.2201,0,0,0,0,0,0,16.2201,0
-c24,0,201.057,0,0,0,0,0,201.057,0
-c25,16.2201,0,0,0,0,0,0,16.2201,0
-c26,16.2201,0,0,0,0,0,0,16.2201,0
-c27,0,0,0,0,0,0,0,0,0
-c28,16.2201,0,0,0,0,0,0,16.2201,0
-c29,16.2201,192.576,0,0,0,0,0,208.7961,0
-c30,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0
-c32,16.2201,192.576,0,0,0,0,0,208.7961,0
-c33,16.2201,0,0,0,0,0,0,16.2201,0
-c34,16.2201,0,0,0,0,0,0,16.2201,0
-c35,16.2201,0,0,0,0,0,0,16.2201,0
-c36,0,201.057,0,0,0,0,0,201.057,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,6.64024,0,0,0,0,0,0,6.64024,0
-c3,6.64024,0,0,0,0,0,0,6.64024,0
-c4,6.64024,0,0,0,0,0,0,6.64024,0
-c5,6.64024,0,0,0,0,0,0,6.64024,0
-c6,6.64024,0,0,0,0,0,0,6.64024,0
-c7,6.64024,0,0,0,0,0,0,6.64024,0
-c8,6.64024,0,0,0,0,0,0,6.64024,0
-c9,6.64024,0,0,0,0,0,0,6.64024,0
-c10,0,0,0,0,0,0,0,0,0
-c11,6.64024,0,0,0,0,0,0,6.64024,0
-c12,0,0,0,0,0,0,0,0,0
-c13,6.64024,0,0,0,0,0,0,6.64024,0
-c14,6.64024,0,0,0,0,0,0,6.64024,0
-c15,6.64024,0,0,0,0,0,0,6.64024,0
-c16,6.64024,0,0,0,0,0,0,6.64024,0
-c17,6.64024,41.8404,0,0,0,0,0,48.48064,0
-c18,0,0,0,0,0,0,0,0,0
-c19,6.64024,0,0,0,0,0,0,6.64024,0
-c20,0,0,0,0,0,0,0,0,0
-c21,6.64024,0,0,0,0,0,0,6.64024,0
-c22,6.64024,0,0,0,0,0,0,6.64024,0
-c23,6.64024,0,0,0,0,0,0,6.64024,0
-c24,0,56.8745,0,0,0,0,0,56.8745,0
-c25,6.64024,0,0,0,0,0,0,6.64024,0
-c26,6.64024,0,0,0,0,0,0,6.64024,0
-c27,0,0,0,0,0,0,0,0,0
-c28,6.64024,0,0,0,0,0,0,6.64024,0
-c29,6.64024,41.8404,0,0,0,0,0,48.48064,0
-c30,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0
-c32,6.64024,41.8404,0,0,0,0,0,48.48064,0
-c33,6.64024,0,0,0,0,0,0,6.64024,0
-c34,6.64024,0,0,0,0,0,0,6.64024,0
-c35,6.64024,0,0,0,0,0,0,6.64024,0
-c36,0,56.8745,0,0,0,0,0,56.8745,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,41.288602,19.299403,345.1436,115.04262,26.245075,4.957979,754.040479,2.30710180519343
-c2,277.26454,41.288602,19.299403,345.1436,115.04262,26.245075,0.405408,824.689248,2.1094590165605
-c3,277.26454,41.288602,19.299403,345.1436,115.04262,26.245075,0.405408,824.689248,2.1094590165605
-c4,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c5,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c6,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c7,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c8,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c9,277.26454,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,719.715868,2.41713183859775
-c10,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c11,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c12,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,435.308631,3.996355361409
-c13,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c14,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c15,277.26454,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,664.373636,2.61847855617218
-c16,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c17,277.26454,475.0897,19.299403,30.964323,115.04262,26.245075,4.957979,948.86364,1.83340168631589
-c18,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,644.514528,2.69916043084751
-c19,277.26454,41.288602,19.299403,30.964323,115.04262,180.10874,4.957979,668.926207,2.60065774479998
-c20,202.0632,433.2493,19.299403,30.964323,115.04262,26.245075,0.405408,827.269329,2.10288005336167
-c21,277.26454,725.4132,228.5053,30.964323,115.04262,26.245075,4.957979,1408.393037,1.23520083654029
-c22,277.26454,725.4132,19.299403,30.964323,115.04262,26.245075,4.957979,1199.18714,1.45068953618953
-c23,277.26454,41.288602,228.5053,30.964323,115.04262,26.245075,0.405408,719.715868,2.41713183859775
-c24,202.0632,782.2877,19.299403,30.964323,115.04262,26.245075,0.405408,1176.307729,1.47890572357995
-c25,277.26454,41.288602,19.299403,30.964323,115.04262,26.245075,0.405408,510.509971,3.40766711534668
-c26,277.26454,725.4132,228.5053,30.964323,115.04262,26.245075,0.405408,1403.840466,1.23920651898301
-c27,202.0632,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,589.172296,2.9526983830383
-c28,277.26454,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,664.373636,2.61847855617218
-c29,277.26454,475.0897,19.299403,30.964323,115.04262,26.245075,0.405408,944.311069,1.84224060681421
-c30,202.0632,41.288602,19.299403,30.964323,115.04262,180.10874,0.405408,589.172296,2.9526983830383
-c31,202.0632,41.288602,228.5053,345.1436,115.04262,26.245075,0.405408,958.693805,1.81460252529716
-c32,277.26454,475.0897,19.299403,30.964323,115.04262,180.10874,0.405408,1098.174734,1.58412697790887
-c33,277.26454,725.4132,19.299403,30.964323,115.04262,180.10874,0.405408,1348.498234,1.2900634262111
-c34,277.26454,41.288602,228.5053,30.964323,115.04262,180.10874,0.405408,873.579533,1.99140217478033
-c35,277.26454,725.4132,19.299403,30.964323,115.04262,180.10874,0.405408,1348.498234,1.2900634262111
-c36,202.0632,782.2877,19.299403,30.964323,115.04262,180.10874,4.957979,1334.723965,1.30337680020776
-c12,435.308631
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results1.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results1.csv
deleted file mode 100644
index 80534cd08b0ee64c04faf405926188256a6da2a4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results1.csv
+++ /dev/null
@@ -1,220 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,52.371161,26.185581,43.642634,1071.7044,1784.0718,9.298743,3632.107319,2.36447190357725
-c2,644.833,52.371161,26.185581,43.642634,1071.7044,910.2389,9.298743,2758.274419,3.11354648851759
-c3,644.833,52.371161,26.185581,43.642634,565.8327,1784.0718,9.298743,3126.235619,2.7470788241608
-c4,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c5,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c6,644.833,52.371161,1052.832,43.642634,1071.7044,39.278371,9.298743,2913.960309,2.94719719474404
-c7,644.833,52.371161,26.185581,43.642634,1071.7044,1784.0718,9.298743,3632.107319,2.36447190357725
-c8,644.833,52.371161,26.185581,43.642634,565.8327,1784.0718,9.298743,3126.235619,2.7470788241608
-c9,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c10,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c11,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c12,644.833,52.371161,26.185581,43.642634,1071.7044,1784.0718,9.298743,3632.107319,2.36447190357725
-c13,644.833,52.371161,26.185581,43.642634,1071.7044,910.2389,9.298743,2758.274419,3.11354648851759
-c14,644.833,52.371161,26.185581,43.642634,1071.7044,39.278371,9.298743,1887.31389,4.55039065492224
-c15,644.833,52.371161,26.185581,43.642634,565.8327,1784.0718,9.298743,3126.235619,2.7470788241608
-c4,1887.31389
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,27.159744,13.608704,22.719616,190.18827,306.11015,2.463761,764.313445,2.27609257009831
-c2,202.0632,27.159744,13.608704,22.719616,190.18827,180.10874,2.463761,638.312035,2.72538823188752
-c3,202.0632,27.159744,13.608704,22.719616,115.04262,306.11015,2.463761,689.167795,2.52427368370084
-c4,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c5,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c6,202.0632,27.159744,211.0585,22.719616,190.18827,20.759040,2.463761,676.412131,2.57187599702052
-c7,202.0632,27.159744,13.608704,22.719616,190.18827,306.11015,2.463761,764.313445,2.27609257009831
-c8,202.0632,27.159744,13.608704,22.719616,115.04262,306.11015,2.463761,689.167795,2.52427368370084
-c9,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c10,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c11,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c12,202.0632,27.159744,13.608704,22.719616,190.18827,306.11015,2.463761,764.313445,2.27609257009831
-c13,202.0632,27.159744,13.608704,22.719616,190.18827,180.10874,2.463761,638.312035,2.72538823188752
-c14,202.0632,27.159744,13.608704,22.719616,190.18827,20.759040,2.463761,478.962335,3.63211862533645
-c15,202.0632,27.159744,13.608704,22.719616,115.04262,306.11015,2.463761,689.167795,2.52427368370084
-c4,478.962335
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,168.647541,75.648404,118.224147,1071.7044,1784.0718,9.298743,3872.428035,2.21773410470276
-c2,644.833,168.647541,75.648404,118.224147,1071.7044,979.0974,9.400083,3067.554975,2.79962893347563
-c3,644.833,168.647541,75.648404,118.224147,565.8327,1835.1288,9.298743,3417.613335,2.51286931841079
-c4,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c5,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c6,644.833,168.647541,1052.832,118.224147,1071.7044,93.90756,9.298743,3159.447391,2.71820182720676
-c7,644.833,168.647541,75.648404,118.224147,1071.7044,1784.0718,9.298743,3872.428035,2.21773410470276
-c8,644.833,168.647541,75.648404,118.224147,565.8327,1835.1288,9.298743,3417.613335,2.51286931841079
-c9,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c10,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c11,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c12,644.833,168.647541,75.648404,118.224147,1071.7044,1784.0718,9.298743,3872.428035,2.21773410470276
-c13,644.833,168.647541,75.648404,118.224147,1071.7044,979.0974,9.400083,3067.554975,2.79962893347563
-c14,644.833,168.647541,75.648404,118.224147,1071.7044,93.90756,9.298743,2182.263795,3.93537003598733
-c15,644.833,168.647541,75.648404,118.224147,565.8327,1835.1288,9.298743,3417.613335,2.51286931841079
-c4,2182.263795
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c2,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c3,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c4,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c5,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c6,0,63.122342,0,47.189800,0,39.390943,0,149.703085,0
-c7,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c8,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c9,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c10,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c11,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c12,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c13,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c14,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c15,0,63.122342,29.446347,47.189800,0,0,0,139.758489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c2,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c3,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c4,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c5,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c6,0,53.154038,0,27.391713,0,15.238246,0,95.783997,0
-c7,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c8,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c9,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c10,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c11,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c12,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c13,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c14,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c15,0,53.154038,20.016476,27.391713,0,0,0,100.562227,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c2,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c3,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c4,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c5,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c6,0,14.128858,0,8.244707,0,5.486035,0,27.8596,0
-c7,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c8,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c9,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c10,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c11,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c12,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c13,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c14,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c15,0,14.128858,5.690699,8.244707,0,0,0,28.064264,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,68.8585,0.10134,68.95984,0
-c3,0,0,0,0,0,51.057,0,51.057,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,51.057,0,51.057,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,68.8585,0.10134,68.95984,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,51.057,0,51.057,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,14.7838,0.219266,15.003066,0
-c3,0,0,0,0,0,10.568,0,10.568,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,10.568,0,10.568,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,14.7838,0.219266,15.003066,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,10.568,0,10.568,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,41.288602,19.299403,30.964323,190.18827,306.11015,2.463761,792.377709,2.19547842107729
-c2,202.0632,41.288602,19.299403,30.964323,190.18827,194.89254,2.683027,681.379365,2.55312710517333
-c3,202.0632,41.288602,19.299403,30.964323,115.04262,316.67815,2.463761,727.800059,2.39028304609095
-c4,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c5,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c6,202.0632,41.288602,211.0585,30.964323,190.18827,26.245075,2.463761,704.271731,2.47013767188425
-c7,202.0632,41.288602,19.299403,30.964323,190.18827,306.11015,2.463761,792.377709,2.19547842107729
-c8,202.0632,41.288602,19.299403,30.964323,115.04262,316.67815,2.463761,727.800059,2.39028304609095
-c9,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c10,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c11,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c12,202.0632,41.288602,19.299403,30.964323,190.18827,306.11015,2.463761,792.377709,2.19547842107729
-c13,202.0632,41.288602,19.299403,30.964323,190.18827,194.89254,2.683027,681.379365,2.55312710517333
-c14,202.0632,41.288602,19.299403,30.964323,190.18827,26.245075,2.463761,512.512634,3.394351526493
-c15,202.0632,41.288602,19.299403,30.964323,115.04262,316.67815,2.463761,727.800059,2.39028304609095
-c4,512.512634
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results2.csv b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results2.csv
deleted file mode 100644
index 0fb57641e8484f4b2d7403c55ac35414dbf58bba..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_results2.csv
+++ /dev/null
@@ -1,253 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c2,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c3,644.833,39.338749,1026.8995,32.782291,16.391145,39.278371,9.298743,1808.821799,4.74785049194055
-c4,644.833,39.338749,1026.8995,2143.065,16.391145,29.504062,9.298743,3909.330199,2.19679978056518
-c5,644.833,39.338749,1026.8995,32.782291,565.8327,29.504062,9.298743,2348.489045,3.65682590497986
-c6,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c7,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c8,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c9,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c10,644.833,39.338749,1026.8995,32.782291,16.391145,39.278371,9.298743,1808.821799,4.74785049194055
-c11,644.833,39.338749,1026.8995,2143.065,16.391145,29.504062,9.298743,3909.330199,2.19679978056518
-c12,644.833,39.338749,1026.8995,32.782291,565.8327,29.504062,9.298743,2348.489045,3.65682590497986
-c13,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c14,644.833,39.338749,1026.8995,32.782291,16.391145,39.278371,9.298743,1808.821799,4.74785049194055
-c15,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c16,644.833,52.371161,26.185581,32.782291,565.8327,29.504062,9.298743,1360.807538,6.31096982643479
-c17,644.833,39.338749,1026.8995,2143.065,16.391145,29.504062,9.298743,3909.330199,2.19679978056518
-c18,644.833,39.338749,1026.8995,32.782291,16.391145,29.504062,9.298743,1799.04749,4.77364578387835
-c16,1360.807538
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c2,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c3,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c4,202.0632,27.159744,228.5053,394.5152,11.532800,20.759040,2.463761,886.999045,1.9612740224231
-c5,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,2.463761,618.713281,2.81171934278887
-c6,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c7,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c8,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c9,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c10,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c11,202.0632,27.159744,228.5053,394.5152,11.532800,20.759040,2.463761,886.999045,1.9612740224231
-c12,202.0632,27.159744,228.5053,22.719616,115.04262,20.759040,2.463761,618.713281,2.81171934278887
-c13,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c14,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c15,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c16,202.0632,27.159744,13.608704,22.719616,115.04262,20.759040,2.463761,403.816685,4.30801404404228
-c17,202.0632,27.159744,228.5053,394.5152,11.532800,20.759040,2.463761,886.999045,1.9612740224231
-c18,202.0632,27.159744,228.5053,22.719616,11.532800,20.759040,2.463761,515.203461,3.37662336343984
-c16,403.816685
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,644.833,1882.211,1052.832,2143.065,1071.7044,1784.0718,9.298743,8588.015943,0.999999988355867
-c1,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c2,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c3,644.833,155.615129,1026.8995,107.363804,49.35844,93.90756,9.298743,2087.276176,4.11446057321067
-c4,644.833,155.615129,1026.8995,2245.12,49.35844,84.133251,9.298743,4215.258063,2.03736416867239
-c5,644.833,155.615129,1026.8995,107.363804,565.8327,84.133251,9.298743,2593.976127,3.31075352719491
-c6,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c7,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c8,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c9,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c10,644.833,155.615129,1026.8995,107.363804,49.35844,93.90756,9.298743,2087.276176,4.11446057321067
-c11,644.833,155.615129,1026.8995,2245.12,49.35844,84.133251,9.298743,4215.258063,2.03736416867239
-c12,644.833,155.615129,1026.8995,107.363804,565.8327,84.133251,9.298743,2593.976127,3.31075352719491
-c13,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c14,644.833,155.615129,1026.8995,107.363804,49.35844,93.90756,9.298743,2087.276176,4.11446057321067
-c15,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c16,644.833,168.647541,75.648404,107.363804,565.8327,84.133251,9.298743,1655.757443,5.18675936540788
-c17,644.833,155.615129,1026.8995,2245.12,49.35844,84.133251,9.298743,4215.258063,2.03736416867239
-c18,644.833,155.615129,1026.8995,107.363804,49.35844,84.133251,9.298743,2077.501867,4.13381844128959
-c16,1655.757443
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c2,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c3,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c4,0,63.122342,0,0,22.543231,39.390943,0,125.056516,0
-c5,0,63.122342,0,47.189800,0,39.390943,0,149.703085,0
-c6,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c7,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c8,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c9,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c10,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c11,0,63.122342,0,0,22.543231,39.390943,0,125.056516,0
-c12,0,63.122342,0,47.189800,0,39.390943,0,149.703085,0
-c13,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c14,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c15,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c16,0,63.122342,29.446347,47.189800,0,39.390943,0,179.149432,0
-c17,0,63.122342,0,0,22.543231,39.390943,0,125.056516,0
-c18,0,63.122342,0,47.189800,22.543231,39.390943,0,172.246316,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c2,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c3,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c4,0,53.154038,0,0,10.424064,15.238246,0,78.816348,0
-c5,0,53.154038,0,27.391713,0,15.238246,0,95.783997,0
-c6,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c7,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c8,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c9,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c10,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c11,0,53.154038,0,0,10.424064,15.238246,0,78.816348,0
-c12,0,53.154038,0,27.391713,0,15.238246,0,95.783997,0
-c13,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c14,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c15,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c16,0,53.154038,20.016476,27.391713,0,15.238246,0,115.800473,0
-c17,0,53.154038,0,0,10.424064,15.238246,0,78.816348,0
-c18,0,53.154038,0,27.391713,10.424064,15.238246,0,106.208061,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c2,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c3,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c4,0,14.128858,0,0,3.455880,5.486035,0,23.070773,0
-c5,0,14.128858,0,8.244707,0,5.486035,0,27.8596,0
-c6,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c7,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c8,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c9,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c10,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c11,0,14.128858,0,0,3.455880,5.486035,0,23.070773,0
-c12,0,14.128858,0,8.244707,0,5.486035,0,27.8596,0
-c13,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c14,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c15,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c16,0,14.128858,5.690699,8.244707,0,5.486035,0,33.550299,0
-c17,0,14.128858,0,0,3.455880,5.486035,0,23.070773,0
-c18,0,14.128858,0,8.244707,3.455880,5.486035,0,31.31548,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,102.055,0,0,0,102.055,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,102.055,0,0,0,102.055,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,102.055,0,0,0,102.055,0
-c18,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,20.9833,0,0,0,20.9833,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,20.9833,0,0,0,20.9833,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,20.9833,0,0,0,20.9833,0
-c18,0,0,0,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,202.0632,433.2493,211.0585,394.5152,190.18827,306.11015,2.463761,1739.648381,0.999999942517123
-c1,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c2,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c3,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c4,202.0632,41.288602,228.5053,415.4985,14.98868,26.245075,2.463761,931.053118,1.86847362467308
-c5,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,2.463761,646.572881,2.69056770406557
-c6,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c7,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c8,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c9,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c10,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c11,202.0632,41.288602,228.5053,415.4985,14.98868,26.245075,2.463761,931.053118,1.86847362467308
-c12,202.0632,41.288602,228.5053,30.964323,115.04262,26.245075,2.463761,646.572881,2.69056770406557
-c13,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c14,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c15,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c16,202.0632,41.288602,19.299403,30.964323,115.04262,26.245075,2.463761,437.366984,3.97754756734277
-c17,202.0632,41.288602,228.5053,415.4985,14.98868,26.245075,2.463761,931.053118,1.86847362467308
-c18,202.0632,41.288602,228.5053,30.964323,14.98868,26.245075,2.463761,546.518941,3.18314322190286
-c16,437.366984
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_tensors.txt b/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_tensors.txt
deleted file mode 100644
index 55ad19b1c8cd2123e86ce7cf7dc43dad7516e413..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet2_cifar10/alexnet2_tensors.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-#Conv1,3
-Conv1,115.33,355.301,200.956,558.231,6.64024,16.2201,41.8978,123.674
-Add1,47.2501,154.498,36.2217,116.852,56.9247,172.83,41.6054,134.411
-Tanh1,39.4831,135.034,33.4466,118.003,57.0977,185.926,41.7089,145.509
-#Conv2,4
-Conv2,310.308,1282.26,610.501,2404.06,56.8745,201.057,41.8404,192.576
-Add2,45.3358,217.323,35.0236,153.259,158.537,686.903,41.5412,177.63
-Tanh2,38.504,188.321,36.2146,162.298,56.9829,242.874,41.625,182.762
-Pool1,39.1015,194.307,43.674,198.246,54.8347,240.102,10.6202,44.9693
-#Conv3,3
-Conv3,165.302,812.529,188.912,837.371,15.6031,66.3758,21.0237,93.63
-Add3,26.3653,138.399,21.2537,100.994,28.3975,126.566,20.9649,93.9519
-Tanh3,19.3912,101.904,18.3396,88.5345,28.9295,129.858,20.8836,94.2318
-#Conv4,4
-Conv4,303.007,1619.75,284.927,1346.18,28.3107,127.724,20.9833,102.055
-Add4,26.3854,150.498,19.4384,96.5969,76.34,366.744,20.8326,97.3319
-Tanh4,19.2511,109.512,18.4717,92.4694,28.0864,130.933,20.8569,97.8308
-Pool2,45.8717,263.305,22.3065,111.077,27.6028,129.232,5.39931,22.7498
-#Conv5,3
-Conv5,156.337,882.718,86.4203,417.756,8.38767,36.7727,10.6053,48.7992
-Add5,24.2343,136.978,19.2262,98.1635,14.7236,68.2983,10.5498,48.5475
-Tanh5,9.61697,52.0084,9.39612,49.9132,14.9164,69.1465,10.5006,48.3962
-#Conv6,4
-Conv6,248.321,1443.4,139.892,696.675,14.7838,68.8585,10.568,51.057
-Add6,24.3243,144.761,18.7741,98.9177,39.6841,197.748,10.548,49.6626
-Tanh6,9.63435,54.9648,9.37294,50.5591,14.3859,68.827,10.5241,49.2841
-Pool3,23.8305,140.946,12.0697,64.0871,14.3906,68.8152,2.78859,10.4233
-#FC1,2
-Mul1,2.20702,9.19011,4.29982,22.6027,5.01057,21.4017,0.219266,0.10134
-Add7,0.256741,0.108633,0.658159,3.49495,0.887661,1.49455,0.140755,0.0381309
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs1.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs1.txt
deleted file mode 100644
index ff963b918fa017a3635a326bbbd7111246cf9384..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs1.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-9 9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,7,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,6,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,7,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,6,8 8 8,7,7
-8 8 8 8,6,7,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,6,8 8 8,8 8 8,7,7
-8 8 8 8,7,7,8 8 8,8 8 8 8,7
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs2.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs2.txt
deleted file mode 100644
index 68b20e726f79648fa9c91169eb1f0a1111b6a889..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_confs2.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-9 9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9
-8 8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,7
-8 8 8 8,7,4,6,8 8 8 8,7
-8 8 8 8,8 8 8 8,7,4,6,7
-8 8 8 8,4,4,6,4,7
-8 8 8 8,4,4,7,7,7
-8 8 8 8,4,4,8 8 8,5,7
-8 8 8 8,7,7,7,8 8 8 8,7
-8 8 8 8,7,5,7,4,7
-8 8 8 8,8 8 8 8,8 8 8,6,4,7
-8 8 8 8,8 8 8 8,4,6,5,7
-8 8 8 8,7,4,6,8 8 8 8,7
-8 8 8 8,8 8 8 8,7,4,6,7
-8 8 8 8,4,4,6,4,7
-8 8 8 8,4,4,7,7,7
-8 8 8 8,4,4,8 8 8,5,7
-8 8 8 8,7,7,7,8 8 8 8,7
-8 8 8 8,7,5,7,4,7
-8 8 8 8,8 8 8 8,8 8 8,6,4,7
-8 8 8 8,4,8 8 8,8 8 8,7,7
-8 8 8 8,8 8 8 8,4,6,5,7
-8 8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,7
-8 8 8 8,7,4,6,8 8 8 8,7
-8 8 8 8,8 8 8 8,4,6,5,7
-8 8 8 8,4,4,6,4,7
-8 8 8 8,7,5,7,4,7
-8 8 8 8,7,7,7,8 8 8 8,7
-8 8 8 8,8 8 8 8,8 8 8,6,4,7
-8 8 8 8,8 8 8 8,7,4,6,7
-8 8 8 8,4,4,8 8 8,5,7
-8 8 8 8,4,4,7,7,7
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp16.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp16.csv
deleted file mode 100644
index d6a6ef63a3128bc63e9fb6b5d07ce78c7d520338..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp16.csv
+++ /dev/null
@@ -1,61 +0,0 @@
-Add1,93.2842,353.575,204.306,149.269,3790.29,2190.14,1600.15,0.49058,4.5223,3.4086,1.24796,43.6026,34.3362,10.1933
-Add1_f2h,309.757,1205.25,778.891,426.362,3892.05,2515.4,1376.65,9.65112,25.7956,14.7813,11.0587,35.4623,28.6884,7.22661
-Add1_h2f,83.2903,330.731,185.213,145.518,3970.82,2223.7,1747.13,0.806036,4.16027,2.76063,1.51937,31.9222,24.6197,8.42906
-Add2,57.2394,332.553,242.25,90.3031,5809.85,4232.21,1577.64,0.457316,4.377,3.61364,0.896967,59.0466,51.9109,9.25098
-Add2_f2h,183.024,1140.1,895.094,245.002,6230.37,4891.61,1338.76,5.77653,29.1735,22.2262,6.9702,39.1547,34.6242,5.22016
-Add2_h2f,62.1204,356.499,249.999,106.501,5738.83,4024.41,1714.41,0.394536,3.84656,3.14568,0.877756,48.4454,42.9786,7.80049
-Add3,64.5391,421.019,313.098,107.922,6523.48,4851.29,1672.19,0.585465,4.31465,3.23825,1.10633,31.0225,23.7417,8.2678
-Add3_f2h,39.2865,257.541,197.461,60.0801,6555.52,5026.24,1529.28,0.47706,3.33185,2.49827,0.847902,40.88,31.2198,10.5659
-Add3_h2f,31.1203,195.786,142.213,53.5726,6291.23,4569.77,1721.46,0.306809,2.38301,1.71741,0.675567,39.8861,28.601,11.885
-Add4,40.9697,274.189,212.979,61.2102,6692.3,5198.28,1494.02,0.384841,4.60445,3.90429,0.821323,78.8032,70.489,12.2778
-Add4_f2h,75.4038,524.503,420.111,104.392,6958.45,5574.07,1384.38,5.55358,35.9191,28.1478,7.80022,43.8327,40.6864,11.0572
-Add4_h2f,20.652,132.57,101.351,31.2198,6419.34,4907.63,1511.71,0.183388,1.80855,1.4875,0.410811,74.4576,63.8749,15.3763
-Add5,41.049,260.675,199.276,61.3997,6350.43,4854.66,1495.77,0.362686,4.17524,3.61668,0.698418,90.2431,81.3189,11.135
-Add5_f2h,79.1767,523.707,412.572,111.134,6618.91,5215.04,1403.88,7.23508,41.7069,31.8753,9.84169,56.7552,52.1865,6.95965
-Add5_h2f,20.6328,125.982,94.7906,31.1915,6105.9,4594.17,1511.73,0.195554,2.38048,1.96829,0.481221,98.2565,84.4038,17.013
-Add6,0.580578,3.19613,2.21839,0.977745,5485.52,3807.55,1677.98,0.0359955,1.55104,1.07733,0.47379,2596.77,1804.25,792.663
-Add6_f2h,0.460408,1.00469,0.697219,0.307468,1898.57,1317.21,581.362,0.176587,1.16926,0.811923,0.35736,1980.93,1374.61,606.356
-Add6_h2f,0.0886822,0.0103369,0.00716523,0.00317167,111.312,77.1828,34.1295,0.015021,0.0600414,0.0415981,0.0184434,608.231,421.665,186.568
-Conv1,759.668,1563.71,760.258,803.448,2058.68,1000.97,1057.71,11.5002,10.4346,3.29189,8.66694,18.8258,13.6161,6.05864
-Conv1_f2h,5.44249,6.60174,1.40281,5.19893,1213.19,257.967,955.226,0.17219,0.353329,0.126049,0.277371,56.2627,24.0056,40.2598
-Conv1_h2f,83.2352,403.353,303.997,99.3552,4845.94,3652.27,1193.67,0.413148,2.25226,1.68684,0.670959,12.8815,9.56103,5.46339
-Conv2,1196.88,6519.17,4850.19,1668.97,5447.23,4052.73,1394.49,11.586,14.405,8.37537,10.139,44.9168,38.8365,6.77628
-Conv2_f2h,26.722,110.701,57.7904,52.9111,4142.93,2162.81,1980.12,0.279116,1.38408,0.889919,0.604014,52.084,36.0813,18.6573
-Conv2_h2f,62.1589,473.091,401.012,72.0794,7611.02,6451.42,1159.6,0.35738,2.96054,2.47914,0.520919,25.9077,21.4465,5.48728
-Conv3,561.436,3134.15,2164.67,969.482,5582.78,3855.92,1726.86,5.64894,12.6119,7.66204,6.48434,42.9576,35.3499,8.45526
-Conv3_f2h,20.4033,103.834,61.6107,42.2228,5089.07,3019.68,2069.39,0.33686,2.07671,1.24786,0.849022,58.3312,37.7391,22.1356
-Conv3_h2f,31.0982,213.521,168.425,45.0957,6866.05,5415.94,1450.12,0.193664,1.81345,1.41647,0.418016,45.1015,35.0321,10.9083
-Conv4,627.538,4183.57,3200.17,983.396,6666.76,5099.68,1567.08,3.19725,10.3525,6.96967,4.85478,29.6592,25.6687,6.16061
-Conv4_f2h,39.4494,231.112,155.03,76.0814,5858.45,3929.88,1928.58,0.383614,2.64826,1.77466,0.893289,38.1649,27.1767,11.8054
-Conv4_h2f,20.805,157.506,130.042,27.4631,7570.58,6250.58,1320,0.262449,2.5472,2.06942,0.51889,78.0758,64.5926,16.6892
-Conv5,430.284,2806.26,2156.62,649.637,6522.06,5012.26,1509.8,3.00065,16.2034,13.6006,4.30614,43.4971,39.7871,5.8117
-Conv5_f2h,26.6141,161.682,116.58,45.1028,6075.21,4380.5,1694.71,0.247547,2.32135,1.82405,0.588892,78.8117,64.6347,17.3373
-Conv5_h2f,20.7426,148.724,120.885,27.8386,7169.94,5827.84,1342.1,0.210998,2.14481,1.7637,0.390591,69.9498,57.905,12.7277
-Mul1,6.85001,40.5555,28.1774,12.3781,5920.38,4113.44,1806.94,0.171041,1.77782,1.24768,0.543706,208.025,148.206,62.2562
-Mul1_f2h,7.36824,40.3988,28.1257,12.2731,5482.81,3817.12,1665.69,0.13546,1.47559,1.05633,0.430894,175.413,126.752,50.5788
-Mul1_h2f,0.141077,0.0604596,0.0419153,0.0185444,379.141,262.838,116.304,0.0403448,0.169651,0.117675,0.0519786,996.445,691.124,305.345
-Pool1,77.3703,318.571,162.02,156.551,4117.58,2094.15,2023.43,0.455239,2.35316,1.53673,0.961891,33.6923,22.5917,12.0676
-Pool1_f2h,210.001,844.263,423.672,420.591,4020.89,2017.89,2003,4.20962,10.864,4.63699,6.43175,33.3606,22.9331,10.8583
-Pool1_h2f,20.9052,88.6981,46.9757,41.7224,4246.27,2248.89,1997.38,0.667696,1.1714,0.660301,0.539743,117.997,63.6641,55.0082
-Pool2,58.7924,314.375,188.669,125.706,5347.29,3209.14,2138.15,0.411831,2.52633,1.74937,0.927995,36.5478,28.4951,9.66495
-Pool2_f2h,76.8368,405.93,248.377,157.553,5283.1,3232.6,2050.5,0.716538,3.90927,2.59879,1.50885,31.914,26.7871,6.83944
-Pool2_h2f,15.6216,82.0579,49.566,32.4919,5255.37,3174.44,2080.93,0.215716,3.1694,1.92435,1.25917,232.498,141.245,92.0377
-Pool3,20.5304,123.325,86.6382,36.6872,6007.12,4220.09,1787.03,0.282783,2.47786,1.92759,0.626736,98.3816,79.4265,23.2579
-Pool3_f2h,26.4508,154.684,109.843,44.8406,5849.24,4153.88,1695.36,0.618053,2.67039,1.80558,0.988641,86.0654,72.4368,16.4758
-Pool3_h2f,5.2822,29.1224,20.4156,8.70687,5513.14,3864.89,1648.25,0.11154,1.36843,0.975666,0.401594,228.306,164.606,65.5302
-Softmax1,2.04532,9.65196,6.69876,2.9532,4718.43,3274.79,1443.63,0.0592166,1.26487,0.87714,0.388768,599.51,416.222,183.818
-Tanh1,71.349,309.397,166.636,142.762,4336.38,2335.5,2000.89,0.319789,2.76113,1.82636,1.0288,32.4436,22.8151,10.8671
-Tanh1_f2h,100.913,396.573,209.559,187.014,3930.03,2076.77,1853.26,0.899812,3.09604,1.99119,1.45311,33.4653,24.7837,9.33032
-Tanh1_h2f,83.2272,381.858,208.746,173.112,4588.14,2508.14,2080,0.436928,2.54257,1.61456,1.04033,21.0556,14.923,7.84162
-Tanh2,53.7113,299.392,193.858,105.534,5574.12,3609.27,1964.84,0.516207,3.73585,2.74272,1.11904,46.5019,38.4508,10.2748
-Tanh2_f2h,76.8765,416.385,277.089,139.296,5416.46,3604.5,1811.96,0.713004,3.82792,2.92357,1.24867,42.9582,37.8125,7.71353
-Tanh2_h2f,62.0292,344.851,220.072,124.778,5559.5,3547.89,2011.61,0.268756,2.54138,2.0299,0.664609,34.7016,29.7044,7.08025
-Tanh3,27.4497,170.841,118.94,51.9014,6223.81,4333.02,1890.79,0.272378,2.30827,1.59613,0.719755,58.4892,40.4211,18.4657
-Tanh3_f2h,39.0187,237.416,168.18,69.2353,6084.79,4310.35,1774.44,0.483226,2.94737,2.05624,0.902876,39.7976,28.4209,12.0028
-Tanh3_h2f,31.0592,188.341,129.589,58.7521,6063.92,4172.31,1891.61,0.214138,1.88422,1.30556,0.593641,41.1045,29.1294,12.6756
-Tanh4,18.3085,118.735,87.9986,30.7359,6485.13,4806.35,1678.77,0.220835,2.58413,2.07212,0.565112,112.566,92.977,23.0207
-Tanh4_f2h,26.1769,164.262,123.396,40.8663,6275.42,4714.21,1561.21,0.358447,2.34225,1.86472,0.587341,71.3708,61.4761,14.4163
-Tanh4_h2f,20.6587,128.708,94.4883,34.2199,6230.16,4573.71,1656.45,0.207862,2.25217,1.81483,0.480835,85.0819,70.6279,17.1689
-Tanh5,18.3283,114.102,83.1648,30.9372,6224.98,4537.15,1687.83,0.260623,5.957,4.43105,1.5561,299.19,223.089,77.8918
-Tanh5_f2h,26.1523,156.042,115.325,40.7168,5968.06,4410.79,1557.27,0.343513,4.99963,3.81509,1.25126,209.878,159.503,52.6995
-Tanh5_h2f,20.633,123.091,88.9037,34.1876,5965.83,4308.87,1656.96,0.190143,2.18341,1.76558,0.481496,94.4781,78.5292,18.9483
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp32.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp32.csv
deleted file mode 100644
index fe62cc77eb9744a531345697df044276e4bb26e2..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_fp32.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-Add1,114.477,689.511,545.497,144.014,6023.13,4765.12,1258.01,0.494926,3.39383,2.63662,0.913302,15.2664,12.3757,5.32532
-Add2,139.245,1083.98,913.705,170.273,7784.72,6561.88,1222.83,0.616984,5.07127,4.2664,0.881209,16.6482,14.6652,3.17889
-Add3,73.7075,582.264,483.313,98.9509,7899.66,6557.19,1342.47,0.766894,6.39355,5.25146,1.16607,26.1761,21.787,5.48411
-Add4,47.9113,399.265,338.936,60.3286,8333.36,7074.23,1259.12,0.344035,4.04787,2.81768,2.21291,52.9096,30.2717,43.7817
-Add5,47.955,388.123,327.404,60.7191,8093.5,6827.34,1266.16,0.385326,4.51233,3.74464,0.829162,71.7827,59.7486,13.7432
-Add6,0.1721,0.083548,0.0671001,0.0164478,452.189,363.168,89.0211,0.0152622,0.264957,0.212801,0.0521557,1369.25,1099.72,269.533
-Conv1,925.494,3271.78,2320.75,951.03,3535.33,2507.72,1027.61,7.38366,11.5946,7.18956,6.71324,22.3312,19.251,4.89111
-Conv2,2080.05,13944.8,11447.4,2497.39,6704.35,5503.69,1200.66,13.4036,25.0061,20.4344,10.1874,42.1223,38.0878,4.46151
-Conv3,1014.71,6863.91,5475.41,1388.5,6764.61,5396.22,1368.39,5.96381,14.6827,14.1842,4.69302,43.9951,39.249,5.4172
-Conv4,1226.86,9535.92,7945,1590.92,7772.68,6475.96,1296.73,4.45174,35.4147,15.5377,26.2279,31.4207,24.4877,19.7888
-Conv5,861.092,6591.9,5495.97,1095.93,7655.42,6382.69,1272.73,4.21251,19.5607,11.6479,15.1544,34.6117,29.1745,17.0262
-Mul1,3.73053,25.3858,20.4052,4.98065,6805.05,5469.93,1335.12,0.0924027,1.617,1.29761,0.32099,403.014,323.605,79.8704
-Pool1,175.352,985.339,672.448,312.891,5619.71,3835.24,1784.47,3.94384,18.2525,12.0852,6.20264,25.3347,19.4031,6.99576
-Pool2,132.287,891.752,674.086,217.667,6741.51,5096,1645.51,3.25049,19.5357,14.6978,4.86596,26.8879,22.2401,5.69913
-Pool3,44.6908,343.93,279.325,64.6046,7695.89,6250.31,1445.57,0.968672,7.36666,5.9023,1.48972,37.1193,30.6839,9.10475
-Softmax1,1.92222,11.9898,9.62903,2.36074,6235.93,5008.1,1227.83,0.0948851,1.8978,1.52456,0.373476,928.239,745.748,182.625
-Tanh1,76.4029,470.583,342.12,128.463,6159.2,4477.81,1681.39,0.423554,3.56257,2.71704,0.923294,25.9953,20.7846,7.04828
-Tanh2,55.9198,411.855,325.555,86.2999,7365.1,5821.83,1543.28,0.307608,2.90099,2.28812,0.637094,29.5989,23.7191,6.64274
-Tanh3,27.9644,214.918,172.203,42.7143,7685.42,6157.97,1527.45,0.27067,2.55707,2.04195,0.526121,57.2266,45.9891,11.875
-Tanh4,18.6218,150.823,125.161,25.6619,8099.35,6721.29,1378.06,0.144119,1.9824,1.59565,0.785064,92.3216,73.7595,41.0994
-Tanh5,18.6201,147.003,121.178,25.8248,7894.73,6507.83,1386.9,0.18229,2.27179,1.84236,0.444801,85.7818,70.1289,16.9109
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_layers.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_layers.txt
deleted file mode 100644
index bc8c3f5668a2fdb5eb8a568f34b334fe02016954..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_layers.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Conv1,2000,3,32,32,64,3,11,11,1,1
-Conv2,2000,64,16,16,192,64,5,5,1,1
-Conv3,2000,192,8,8,384,192,3,3,1,1
-Conv4,2000,384,8,8,256,384,3,3,1,1
-Conv5,2000,256,8,8,256,256,3,3,1,1
-FC1,2000,4096,4096,10
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_ops.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_ops.txt
deleted file mode 100644
index 9d047b9e469f980534fa95b39fd43e2984bf9d43..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_ops.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-#Conv1,4
-Conv1
-Add1
-Tanh1
-Pool1
-#Conv2,4
-Conv2
-Add2
-Tanh2
-Pool2
-#Conv3,3
-Conv3
-Add3
-Tanh3
-#Conv4,3
-Conv4
-Add4
-Tanh4
-#Conv5,4
-Conv5
-Add5
-Tanh5
-Pool3
-#FC1,2
-Mul1
-Add6
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs1.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs1.txt
deleted file mode 100644
index f9a86825a6429e3145247f1680c64999ecfb918a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs1.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-9 9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9
-8 8 8 8,4,6,6,7,7
-8 8 8 8,6,9 9 9,6,9 9 9 9,3
-8 8 8 8,4,3,8 8 8,8 8 8 8,4
-9 9 9 9,4,3,9 9 9,7,9 9
-8 8 8 8,8 8 8 8,9 9 9,7,3,4
-8 8 8 8,4,6,8 8 8,9 9 9 9,7
-8 8 8 8,9 9 9 9,6,6,9 9 9 9,4
-9 9 9 9,7,9 9 9,8 8 8,9 9 9 9,5
-8 8 8 8,9 9 9 9,7,8 8 8,5,5
-8 8 8 8,8 8 8 8,7,7,8 8 8 8,5
-9 9 9 9,6,6,8 8 8,9 9 9 9,7
-8 8 8 8,4,4,8 8 8,8 8 8 8,8 8
-8 8 8 8,9 9 9 9,6,6,9 9 9 9,8 8
-8 8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,9 9
-9 9 9 9,8 8 8 8,9 9 9,9 9 9,7,7
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs2.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs2.txt
deleted file mode 100644
index c73097f5494c9545b8d6a2bd7f737a7a9ad2dcc8..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_confs2.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-9 9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9
-8 8 8 8,3,1,4,7,3
-8 8 8 8,3,1,6,7,3
-8 8 8 8,4,1,3,7,4
-9 9 9 9,4,1,9 9 9,3,3
-8 8 8 8,3,1,7,7,4
-9 9 9 9,7,1,7,4,9 9
-8 8 8 8,3,1,6,5,9 9
-9 9 9 9,1,5,7,2,9 9
-8 8 8 8,3,2,2,7,3
-8 8 8 8,9 9 9 9,1,7,3,9 9
-8 8 8 8,4,1,6,7,8 8
-8 8 8 8,4,1,6,7,5
-8 8 8 8,1,8 8 8,6,7,3
-8 8 8 8,5,1,6,7,7
-9 9 9 9,7,1,6,7,8 8
-8 8 8 8,3,1,9 9 9,7,8 8
-8 8 8 8,3,7,2,7,3
-8 8 8 8,3,8 8 8,2,4,8 8
-9 9 9 9,1,8 8 8,9 9 9,4,8 8
-8 8 8 8,5,4,3,2,9 9
-8 8 8 8,5,1,6,8 8 8 8,3
-8 8 8 8,9 9 9 9,1,8 8 8,7,3
-9 9 9 9,4,2,4,9 9 9 9,2
-9 9 9 9,2,3,8 8 8,7,2
-9 9 9 9,3,2,6,7,3
-8 8 8 8,6,4,6,2,3
-8 8 8 8,6,4,9 9 9,2,3
-8 8 8 8,7,4,4,3,4
-9 9 9 9,4,7,6,2,8 8
-8 8 8 8,7,3,3,5,5
-8 8 8 8,3,2,6,7,7
-9 9 9 9,7,4,6,3,4
-9 9 9 9,7,9 9 9,3,7,3
-8 8 8 8,3,9 9 9,5,6,3
-9 9 9 9,6,7,3,4,5
-8 8 8 8,3,9 9 9,6,3,3
-8 8 8 8,2,5,7,9 9 9 9,3
-8 8 8 8,3,5,6,7,3
-8 8 8 8,4,7,6,5,3
-8 8 8 8,3,3,6,7,4
-8 8 8 8,3,4,7,5,6
-8 8 8 8,7,7,6,7,3
-8 8 8 8,5,4,7,4,7
-8 8 8 8,7,3,4,7,7
-8 8 8 8,4,3,7,4,5
-9 9 9 9,3,9 9 9,5,7,3
-8 8 8 8,7,2,7,9 9 9 9,7
-8 8 8 8,3,4,4,8 8 8 8,3
-8 8 8 8,3,5,6,7,5
-9 9 9 9,3,4,9 9 9,7,3
-8 8 8 8,7,4,7,7,3
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results1.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results1.csv
deleted file mode 100644
index 4854a2efa2b2e321decb3ef95ab6bda3b1e22f42..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results1.csv
+++ /dev/null
@@ -1,220 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2545.253,150.857135,137.685622,177.024371,157.113483,0.174571,3168.108182,14.8331815597962
-c2,2545.253,255.701869,7661.092,177.024371,7470.956,0.057413,18110.084653,2.59485949342205
-c3,2545.253,150.857135,60.283643,4576.494,3304.362,0.077363,10637.327141,4.41775685595832
-c4,5417.213,150.857135,60.283643,10086.008,157.113483,25.469348,15896.944609,2.95611051105908
-c5,2545.253,7465.49,7661.092,235.670225,51.671694,0.077363,17959.254282,2.61665235919258
-c6,2545.253,150.857135,137.685622,4576.494,7470.956,0.174571,14881.420328,3.15783870063778
-c7,2545.253,16332.387,137.685622,177.024371,7470.956,0.077363,26663.383356,1.7624591952311
-c8,5417.213,340.412547,7661.092,4576.494,7470.956,0.086312,25466.253859,1.8453096958688
-c9,2545.253,16332.387,183.299064,4576.494,77.680494,0.086312,23715.19987,1.98156142083756
-c10,2545.253,7465.49,183.299064,235.670225,3304.362,0.086312,13734.160601,3.42162337918315
-c11,5417.213,255.701869,137.685622,4576.494,7470.956,0.174571,17858.225062,2.63145552941035
-c12,2545.253,150.857135,81.230765,4576.494,3304.362,43.75163,10701.94853,4.39108119209875
-c13,2545.253,16332.387,137.685622,177.024371,7470.956,43.75163,26707.057623,1.75957703148744
-c14,2545.253,7465.49,3726.01,235.670225,3304.362,25.469348,17302.254573,2.71601165490486
-c15,5417.213,7465.49,7661.092,10086.008,157.113483,0.174571,30787.091054,1.52639056132117
-c1,3168.108182
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c2,1001.6715,182.333568,1116.3819,128.244736,972.3579,0.090100,3401.079704,2.08323933524495
-c3,1001.6715,182.333568,96.183552,686.8162,510.1917,0.090100,2477.28662,2.86009010293326
-c4,1291.7259,182.333568,96.183552,1293.3931,82.574848,3.90263,2950.113598,2.40169158049854
-c5,1001.6715,1366.6231,1116.3819,128.244736,82.574848,0.090100,3695.586184,1.91722305623754
-c6,1001.6715,182.333568,96.183552,686.8162,972.3579,0.090100,2939.45282,2.41040201113342
-c7,1001.6715,2407.5018,96.183552,128.244736,972.3579,0.090100,4606.049588,1.53825158431508
-c8,1291.7259,182.333568,1116.3819,686.8162,972.3579,0.090100,4249.705568,1.66723622375817
-c9,1001.6715,2407.5018,96.183552,686.8162,82.574848,0.090100,4274.838,1.65743428505515
-c10,1001.6715,1366.6231,96.183552,128.244736,510.1917,0.090100,3103.004688,2.28335555826413
-c11,1291.7259,182.333568,96.183552,686.8162,972.3579,0.090100,3229.50722,2.1939145906627
-c12,1001.6715,182.333568,96.183552,686.8162,510.1917,7.430588,2484.627108,2.85164036165541
-c13,1001.6715,2407.5018,96.183552,128.244736,972.3579,7.430588,4613.390076,1.53580403124351
-c14,1001.6715,1366.6231,653.4248,128.244736,510.1917,3.90263,3664.058466,1.93371997264085
-c15,1291.7259,1366.6231,1116.3819,1293.3931,82.574848,0.090100,5150.788948,1.37556851270217
-c1,1491.098304
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2551.85474,553.269866,357.336004,478.667351,352.195669,4.712122,4298.035752,10.9336280492246
-c2,2551.85474,658.1146,7661.092,478.667351,7470.956,4.594964,18825.279655,2.49627766278048
-c3,2551.85474,553.269866,279.934025,4576.494,3304.362,4.614914,11270.529545,4.1695578493818
-c4,5417.213,553.269866,279.934025,10086.008,352.195669,25.469348,16714.089908,2.81158742866093
-c5,2551.85474,7465.49,7874.613,537.313205,246.75388,4.614914,18680.639739,2.51560576902143
-c6,2551.85474,553.269866,357.336004,4576.494,7619.68,4.712122,15663.346732,3.00019694718077
-c7,2551.85474,16805.478,357.336004,478.667351,7470.956,4.614914,27668.907009,1.69840916241734
-c8,5417.213,742.825278,7661.092,4807.606,7619.68,4.623863,26253.040141,1.79000698268118
-c9,2551.85474,16805.478,402.949446,4576.494,272.76268,4.623863,24614.162729,1.9091904800692
-c10,2551.85474,7465.49,402.949446,537.313205,3304.362,4.623863,14266.593254,3.29392758186552
-c11,5417.213,658.1146,357.336004,4576.494,7619.68,4.712122,18633.549726,2.52196311421182
-c12,2551.85474,553.269866,300.881147,4576.494,3304.362,43.75163,11330.613383,4.14744756923415
-c13,2551.85474,16805.478,357.336004,478.667351,7470.956,84.15043,27748.442525,1.69354100275384
-c14,2551.85474,7465.49,3726.01,537.313205,3304.362,25.5298076,17610.5597526,2.66846288484474
-c15,5417.213,7576.191,7874.613,10086.008,352.195669,4.712122,31310.932791,1.50085356803622
-c1,4298.035752
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c2,0,324.065467,0,229.498883,0,1.216143,554.780493,0
-c3,0,324.065467,175.390266,0,0,1.216143,500.671876,0
-c4,0,324.065467,175.390266,0,151.959867,0,651.4156,0
-c5,0,0,0,229.498883,151.959867,1.216143,382.674893,0
-c6,0,324.065467,175.390266,0,0,1.216143,500.671876,0
-c7,0,0,175.390266,229.498883,0,1.216143,406.105292,0
-c8,0,324.065467,0,0,0,1.216143,325.28161,0
-c9,0,0,175.390266,0,151.959867,1.216143,328.566276,0
-c10,0,0,175.390266,229.498883,0,1.216143,406.105292,0
-c11,0,324.065467,175.390266,0,0,1.216143,500.671876,0
-c12,0,324.065467,175.390266,0,0,0,499.455733,0
-c13,0,0,175.390266,229.498883,0,0,404.889149,0
-c14,0,0,0,229.498883,0,0,229.498883,0
-c15,0,0,0,0,151.959867,1.216143,153.17601,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c2,0,78.347264,0,72.144097,0,3.321408,153.812769,0
-c3,0,78.347264,44.260116,0,0,3.321408,125.928788,0
-c4,0,78.347264,44.260116,0,43.122319,0,165.729699,0
-c5,0,0,0,72.144097,43.122319,3.321408,118.587824,0
-c6,0,78.347264,44.260116,0,0,3.321408,125.928788,0
-c7,0,0,44.260116,72.144097,0,3.321408,119.725621,0
-c8,0,78.347264,0,0,0,3.321408,81.668672,0
-c9,0,0,44.260116,0,43.122319,3.321408,90.703843,0
-c10,0,0,44.260116,72.144097,0,3.321408,119.725621,0
-c11,0,78.347264,44.260116,0,0,3.321408,125.928788,0
-c12,0,78.347264,44.260116,0,0,0,122.60738,0
-c13,0,0,44.260116,72.144097,0,0,116.404213,0
-c14,0,0,0,72.144097,0,0,72.144097,0
-c15,0,0,0,0,43.122319,3.321408,46.443727,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c2,0,36.608647,0,29.286620,0,0.701660,66.596927,0
-c3,0,36.608647,19.907558,0,0,0.701660,57.217865,0
-c4,0,36.608647,19.907558,0,18.163317,0,74.679522,0
-c5,0,0,0,29.286620,18.163317,0.701660,48.151597,0
-c6,0,36.608647,19.907558,0,0,0.701660,57.217865,0
-c7,0,0,19.907558,29.286620,0,0.701660,49.895838,0
-c8,0,36.608647,0,0,0,0.701660,37.310307,0
-c9,0,0,19.907558,0,18.163317,0.701660,38.772535,0
-c10,0,0,19.907558,29.286620,0,0.701660,49.895838,0
-c11,0,36.608647,19.907558,0,0,0.701660,57.217865,0
-c12,0,36.608647,19.907558,0,0,0,56.516205,0
-c13,0,0,19.907558,29.286620,0,0,49.194178,0
-c14,0,0,0,29.286620,0,0,29.28662,0
-c15,0,0,0,0,18.163317,0.701660,18.864977,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,6.60174,0,0,0,0,0,6.60174,0
-c2,6.60174,0,0,0,0,0,6.60174,0
-c3,6.60174,0,0,0,0,0,6.60174,0
-c4,0,0,0,0,0,0,0,0
-c5,6.60174,0,213.521,0,0,0,220.12274,0
-c6,6.60174,0,0,0,148.724,0,155.32574,0
-c7,6.60174,473.091,0,0,0,0,479.69274,0
-c8,0,0,0,231.112,148.724,0,379.836,0
-c9,6.60174,473.091,0,0,0,0,479.69274,0
-c10,6.60174,0,0,0,0,0,6.60174,0
-c11,0,0,0,0,148.724,0,148.724,0
-c12,6.60174,0,0,0,0,0,6.60174,0
-c13,6.60174,473.091,0,0,0,40.3988,520.09154,0
-c14,6.60174,0,0,0,0,0.0604596,6.6621996,0
-c15,0,110.701,213.521,0,0,0,324.222,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,5.44249,0,0,0,0,0,5.44249,0
-c2,5.44249,0,0,0,0,0,5.44249,0
-c3,5.44249,0,0,0,0,0,5.44249,0
-c4,0,0,0,0,0,0,0,0
-c5,5.44249,0,31.0982,0,0,0,36.54069,0
-c6,5.44249,0,0,0,20.7426,0,26.18509,0
-c7,5.44249,62.1589,0,0,0,0,67.60139,0
-c8,0,0,0,39.4494,20.7426,0,60.192,0
-c9,5.44249,62.1589,0,0,0,0,67.60139,0
-c10,5.44249,0,0,0,0,0,5.44249,0
-c11,0,0,0,0,20.7426,0,20.7426,0
-c12,5.44249,0,0,0,0,0,5.44249,0
-c13,5.44249,62.1589,0,0,0,7.36824,74.96963,0
-c14,5.44249,0,0,0,0,0.141077,5.583567,0
-c15,0,26.722,31.0982,0,0,0,57.8202,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c2,1007.11399,218.942215,1116.3819,157.531356,972.3579,0.79176,3473.119121,2.04002879807851
-c3,1007.11399,218.942215,116.09111,686.8162,510.1917,0.79176,2539.946975,2.78953183699704
-c4,1291.7259,218.942215,116.09111,1293.3931,100.738165,3.90263,3024.79312,2.34239589772685
-c5,1007.11399,1366.6231,1147.4801,157.531356,100.738165,0.79176,3780.278471,1.87427013563335
-c6,1007.11399,218.942215,116.09111,686.8162,993.1005,0.79176,3022.855775,2.34389713667708
-c7,1007.11399,2469.6607,116.09111,157.531356,972.3579,0.79176,4723.546816,1.49998790231133
-c8,1291.7259,218.942215,1116.3819,726.2656,993.1005,0.79176,4347.207875,1.62984225064594
-c9,1007.11399,2469.6607,116.09111,686.8162,100.738165,0.79176,4381.211925,1.61719250051588
-c10,1007.11399,1366.6231,116.09111,157.531356,510.1917,0.79176,3158.343016,2.24334816382249
-c11,1291.7259,218.942215,116.09111,686.8162,993.1005,0.79176,3307.467685,2.14220173576082
-c12,1007.11399,218.942215,116.09111,686.8162,510.1917,7.430588,2546.585803,2.78225966053343
-c13,1007.11399,2469.6607,116.09111,157.531356,972.3579,14.798828,4737.553884,1.49555303304803
-c14,1007.11399,1366.6231,653.4248,157.531356,510.1917,4.043707,3698.928653,1.91549059285165
-c15,1291.7259,1393.3451,1147.4801,1293.3931,100.738165,0.79176,5227.474125,1.35538941466517
-c1,1601.208596
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results2.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results2.csv
deleted file mode 100644
index b3853553e5d5a2c768c71ee52ff3748070d0a4b4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_promise_results2.csv
+++ /dev/null
@@ -1,616 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2545.253,111.955336,52.545506,104.439555,157.113483,0.057413,2971.364293,15.8153356951801
-c2,2545.253,111.955336,52.545506,177.024371,157.113483,0.057413,3043.949109,15.4382094185593
-c3,2545.253,150.857135,52.545506,77.507541,157.113483,0.077363,2983.354028,15.7517757972311
-c4,5417.213,150.857135,52.545506,10086.008,51.671694,0.057413,15758.352748,2.98210896794104
-c5,2545.253,111.955336,52.545506,235.670225,157.113483,0.077363,3102.614913,15.1462959958287
-c6,5417.213,340.412547,52.545506,235.670225,69.626370,25.469348,6140.936996,7.65243555069303
-c7,2545.253,111.955336,52.545506,177.024371,77.680494,25.469348,2989.928055,15.7171419886509
-c8,5417.213,97.584511,90.627243,235.670225,46.727966,25.469348,5913.292293,7.94703224951792
-c9,2545.253,111.955336,54.515960,70.091949,157.113483,0.057413,2938.987141,15.9895642595612
-c10,2545.253,16332.387,52.545506,235.670225,51.671694,25.469348,19242.996773,2.44208974611103
-c11,2545.253,150.857135,52.545506,177.024371,157.113483,43.75163,3126.545125,15.0303680152268
-c12,2545.253,150.857135,52.545506,177.024371,157.113483,0.086312,3082.879807,15.2432552566505
-c13,2545.253,97.584511,3726.01,177.024371,157.113483,0.057413,6703.042778,7.01071531292687
-c14,2545.253,168.307738,52.545506,177.024371,157.113483,0.174571,3100.418669,15.1570251792654
-c15,5417.213,340.412547,52.545506,177.024371,157.113483,43.75163,6188.060537,7.59416044940091
-c16,2545.253,111.955336,52.545506,10086.008,157.113483,43.75163,12996.626955,3.61579394016088
-c17,2545.253,111.955336,183.299064,70.091949,157.113483,0.057413,3067.770245,15.3183322293312
-c18,2545.253,111.955336,3726.01,70.091949,69.626370,43.75163,6566.688285,7.15628983634191
-c19,5417.213,97.584511,3726.01,10086.008,69.626370,43.75163,19440.193511,2.41731776382152
-c20,2545.253,168.307738,81.230765,77.507541,46.727966,25.469348,2944.496358,15.9596474366009
-c21,2545.253,168.307738,52.545506,177.024371,3304.362,0.057413,6247.550028,7.5218484662313
-c22,2545.253,16332.387,52.545506,4576.494,157.113483,0.057413,23663.850402,1.98586131804831
-c23,5417.213,150.857135,54.515960,104.439555,7470.956,0.051920,13198.03357,3.56061565859
-c24,5417.213,101.243926,60.283643,4576.494,157.113483,0.051920,10312.399972,4.55695328147661
-c25,5417.213,111.955336,54.515960,177.024371,157.113483,0.057413,5917.879563,7.94087207311975
-c26,2545.253,255.701869,81.230765,177.024371,46.727966,0.057413,3105.995384,15.1298112280192
-c27,2545.253,255.701869,81.230765,10086.008,46.727966,0.057413,13014.979013,3.6106954102647
-c28,2545.253,340.412547,81.230765,104.439555,51.671694,0.077363,3123.084924,15.0470208101513
-c29,5417.213,150.857135,183.299064,177.024371,46.727966,43.75163,6018.873166,7.80762831699072
-c30,2545.253,340.412547,60.283643,77.507541,77.680494,0.086312,3101.223537,15.1530914402095
-c31,2545.253,111.955336,54.515960,177.024371,157.113483,0.174571,3046.036721,15.4276287876823
-c32,5417.213,340.412547,81.230765,177.024371,51.671694,0.077363,6067.62974,7.74488994668139
-c33,5417.213,340.412547,7661.092,77.507541,157.113483,0.057413,13653.395984,3.44186347915811
-c34,2545.253,111.955336,7661.092,116.520741,118.016247,0.057413,10552.894737,4.45310278116628
-c35,5417.213,255.701869,183.299064,77.507541,69.626370,0.086312,6003.434156,7.82770716628299
-c36,2545.253,111.955336,7661.092,177.024371,51.671694,0.057413,10547.053814,4.45556889451584
-c37,2545.253,101.243926,90.627243,235.670225,7470.956,0.057413,10443.807807,4.4996160180716
-c38,2545.253,111.955336,90.627243,177.024371,157.113483,0.057413,3082.030846,15.2474540883471
-c39,2545.253,150.857135,183.299064,177.024371,77.680494,0.057413,3134.171477,14.9937947535665
-c40,2545.253,111.955336,60.283643,177.024371,157.113483,0.077363,3051.707196,15.3989622168534
-c41,2545.253,111.955336,81.230765,235.670225,77.680494,0.131129,3051.920949,15.3978836914532
-c42,2545.253,340.412547,183.299064,177.024371,157.113483,0.057413,3403.159878,13.8086736009447
-c43,2545.253,168.307738,81.230765,235.670225,69.626370,0.174571,3100.262669,15.1577878552397
-c44,2545.253,340.412547,60.283643,104.439555,157.113483,0.174571,3207.676799,14.6502053753139
-c45,2545.253,150.857135,60.283643,235.670225,69.626370,0.086312,3061.776685,15.3483185247941
-c46,5417.213,111.955336,7661.092,116.520741,157.113483,0.057413,13463.951973,3.49029208461295
-c47,2545.253,340.412547,54.515960,235.670225,7470.956,0.174571,10646.982303,4.41375063555649
-c48,2545.253,111.955336,81.230765,104.439555,3304.362,0.057413,6147.298069,7.64451699853767
-c49,2545.253,111.955336,90.627243,177.024371,157.113483,0.086312,3082.059745,15.247311120268
-c50,5417.213,111.955336,81.230765,10086.008,157.113483,0.057413,15853.577997,2.96419679270338
-c51,2545.253,340.412547,81.230765,235.670225,157.113483,0.057413,3359.737433,13.9871418187952
-c9,2938.987141
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c2,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c3,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c4,1291.7259,182.333568,96.183552,1293.3931,82.574848,0.090100,2946.301068,2.40479938268144
-c5,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c6,1291.7259,182.333568,96.183552,128.244736,82.574848,3.90263,1784.965234,3.96941223173357
-c7,1001.6715,182.333568,96.183552,128.244736,82.574848,3.90263,1494.910834,4.73958887372751
-c8,1291.7259,182.333568,96.183552,128.244736,82.574848,3.90263,1784.965234,3.96941223173357
-c9,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c10,1001.6715,2407.5018,96.183552,128.244736,82.574848,3.90263,3720.079066,1.90460012108248
-c11,1001.6715,182.333568,96.183552,128.244736,82.574848,7.430588,1498.438792,4.72842987980854
-c12,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c13,1001.6715,182.333568,653.4248,128.244736,82.574848,0.090100,2048.339552,3.45902752167198
-c14,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c15,1291.7259,182.333568,96.183552,128.244736,82.574848,7.430588,1788.493192,3.96158222213785
-c16,1001.6715,182.333568,96.183552,1293.3931,82.574848,7.430588,2663.587156,2.66004547590462
-c17,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c18,1001.6715,182.333568,653.4248,128.244736,82.574848,7.430588,2055.68004,3.44667591622498
-c19,1291.7259,182.333568,653.4248,1293.3931,82.574848,7.430588,3510.882804,2.01808588430211
-c20,1001.6715,182.333568,96.183552,128.244736,82.574848,3.90263,1494.910834,4.73958887372751
-c21,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c22,1001.6715,2407.5018,96.183552,686.8162,82.574848,0.090100,4274.838,1.65743428505515
-c23,1291.7259,182.333568,96.183552,128.244736,972.3579,0.090100,2670.935756,2.65272683882829
-c24,1291.7259,182.333568,96.183552,686.8162,82.574848,0.090100,2339.724168,3.028247100269
-c25,1291.7259,182.333568,96.183552,128.244736,82.574848,0.090100,1781.152704,3.97790869715859
-c26,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c27,1001.6715,182.333568,96.183552,1293.3931,82.574848,0.090100,2656.246668,2.66739646156249
-c28,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c29,1291.7259,182.333568,96.183552,128.244736,82.574848,7.430588,1788.493192,3.96158222213785
-c30,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c31,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c32,1291.7259,182.333568,96.183552,128.244736,82.574848,0.090100,1781.152704,3.97790869715859
-c33,1291.7259,182.333568,1116.3819,128.244736,82.574848,0.090100,2801.351052,2.52923066247569
-c34,1001.6715,182.333568,1116.3819,128.244736,82.574848,0.090100,2511.296652,2.82135642645868
-c35,1291.7259,182.333568,96.183552,128.244736,82.574848,0.090100,1781.152704,3.97790869715859
-c36,1001.6715,182.333568,1116.3819,128.244736,82.574848,0.090100,2511.296652,2.82135642645868
-c37,1001.6715,182.333568,96.183552,128.244736,972.3579,0.090100,2380.881356,2.97589920411392
-c38,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c39,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c40,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c41,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c42,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c43,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c44,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c45,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c46,1291.7259,182.333568,1116.3819,128.244736,82.574848,0.090100,2801.351052,2.52923066247569
-c47,1001.6715,182.333568,96.183552,128.244736,972.3579,0.090100,2380.881356,2.97589920411392
-c48,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c49,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c50,1291.7259,182.333568,96.183552,1293.3931,82.574848,0.090100,2946.301068,2.40479938268144
-c51,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c1,1491.098304
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2551.85474,514.368067,272.195888,406.082535,352.195669,4.594964,4101.291863,11.4581272857311
-c2,2551.85474,514.368067,272.195888,478.667351,352.195669,4.594964,4173.876679,11.2588674357701
-c3,2551.85474,553.269866,272.195888,379.150521,352.195669,4.614914,4113.281598,11.4247281850036
-c4,5417.213,553.269866,272.195888,10086.008,246.75388,4.594964,16580.035598,2.83431991365788
-c5,2551.85474,514.368067,272.195888,537.313205,352.195669,4.614914,4232.542483,11.1028121812047
-c6,5417.213,742.825278,272.195888,537.313205,264.708556,25.469348,7259.725275,6.47312713919292
-c7,2551.85474,514.368067,272.195888,478.667351,272.76268,25.469348,4115.318074,11.4190746282744
-c8,5417.213,499.997242,310.277625,537.313205,241.810152,25.469348,7032.080572,6.68267722455389
-c9,2551.85474,514.368067,274.166342,371.734929,352.195669,4.594964,4068.914711,11.5493018484825
-c10,2551.85474,16805.478,272.195888,537.313205,246.75388,25.469348,20439.065061,2.29918173741469
-c11,2551.85474,553.269866,272.195888,478.667351,352.195669,43.75163,4251.935144,11.0521733402015
-c12,2551.85474,553.269866,272.195888,478.667351,352.195669,4.623863,4212.807377,11.1548238566706
-c13,2551.85474,499.997242,3726.01,478.667351,352.195669,4.594964,7613.319966,6.17248781617163
-c14,2551.85474,570.720469,272.195888,478.667351,352.195669,4.712122,4230.346239,11.10857636283
-c15,5417.213,742.825278,272.195888,478.667351,352.195669,43.75163,7306.848816,6.43138046074799
-c16,2551.85474,514.368067,272.195888,10086.008,352.195669,43.75163,13820.373994,3.40027882229336
-c17,2551.85474,514.368067,402.949446,371.734929,352.195669,4.594964,4197.697815,11.1949755079029
-c18,2551.85474,514.368067,3726.01,371.734929,264.708556,43.75163,7472.427922,6.28886958959589
-c19,5417.213,499.997242,3726.01,10243.514,264.708556,43.75163,20195.194428,2.32694591195175
-c20,2551.85474,570.720469,300.881147,379.150521,241.810152,25.469348,4069.886377,11.5465445077081
-c21,2551.85474,570.720469,272.195888,478.667351,3304.362,4.594964,7182.395412,6.54282060483667
-c22,2551.85474,16805.478,272.195888,4576.494,352.195669,4.594964,24562.813261,1.91318171324031
-c23,5417.213,553.269866,274.166342,406.082535,7470.956,4.589471,14126.277214,3.32664610098139
-c24,5417.213,503.656657,279.934025,4576.494,352.195669,4.589471,11134.082822,4.22065523287469
-c25,5417.213,514.368067,274.166342,478.667351,352.195669,4.594964,7041.205393,6.67401702658985
-c26,2551.85474,658.1146,300.881147,478.667351,241.810152,4.594964,4235.922954,11.093951601322
-c27,2551.85474,658.1146,300.881147,10086.008,241.810152,4.594964,13843.263603,3.39465651714892
-c28,2551.85474,742.825278,300.881147,406.082535,246.75388,4.614914,4253.012494,11.049373663811
-c29,5417.213,553.269866,402.949446,478.667351,241.810152,43.75163,7137.661445,6.58382651681196
-c30,2551.85474,742.825278,279.934025,379.150521,272.76268,4.623863,4231.151107,11.1064632410808
-c31,2551.85474,514.368067,274.166342,478.667351,352.195669,4.712122,4175.964291,11.2532390001407
-c32,5417.213,742.825278,300.881147,478.667351,246.75388,4.614914,7190.95557,6.53503199081743
-c33,5417.213,742.825278,7661.092,379.150521,352.195669,4.594964,14557.071432,3.22819910891402
-c34,2551.85474,514.368067,7661.092,418.163721,313.098433,4.594964,11463.171925,4.09948705694312
-c35,5417.213,658.1146,402.949446,379.150521,264.708556,4.623863,7126.759986,6.59389747668293
-c36,2551.85474,514.368067,7661.092,478.667351,246.75388,4.594964,11457.331002,4.10157696671582
-c37,2551.85474,503.656657,310.277625,537.313205,7470.956,4.594964,11378.653191,4.12993736131933
-c38,2551.85474,514.368067,310.277625,478.667351,352.195669,4.594964,4211.958416,11.1570722193694
-c39,2551.85474,553.269866,402.949446,478.667351,272.76268,4.594964,4264.099047,11.0206455638026
-c40,2551.85474,514.368067,279.934025,478.667351,352.195669,4.614914,4181.634766,11.2379791287114
-c41,2551.85474,514.368067,300.881147,537.313205,272.76268,4.66868,4181.848519,11.2374047052993
-c42,2551.85474,742.825278,402.949446,478.667351,352.195669,4.594964,4533.087448,10.3666926461046
-c43,2551.85474,570.720469,300.881147,537.313205,264.708556,4.712122,4230.190239,11.1089860223899
-c44,2551.85474,742.825278,279.934025,406.082535,352.195669,4.712122,4337.604369,10.8338890011412
-c45,2551.85474,553.269866,279.934025,537.313205,264.708556,4.623863,4191.704255,11.210982781251
-c46,5417.213,514.368067,7661.092,418.163721,352.195669,4.594964,14367.627421,3.27076445149444
-c47,2551.85474,742.825278,274.166342,537.313205,7470.956,4.712122,11581.827687,4.05748783458405
-c48,2551.85474,514.368067,300.881147,406.082535,3304.362,4.594964,7082.143453,6.63543812636976
-c49,2551.85474,514.368067,310.277625,478.667351,352.195669,4.623863,4211.987315,11.1569956692285
-c50,5417.213,514.368067,300.881147,10086.008,352.195669,4.594964,16675.260847,2.8181343306927
-c51,2551.85474,742.825278,300.881147,537.313205,352.195669,4.594964,4489.665003,10.4669556124797
-c9,4068.914711
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c2,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c3,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c4,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c5,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c6,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c7,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c8,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c9,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c10,0,0,175.390266,229.498883,151.959867,0,556.849016,0
-c11,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c12,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c13,0,324.065467,0,229.498883,151.959867,1.216143,706.74036,0
-c14,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c15,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c16,0,324.065467,175.390266,0,151.959867,0,651.4156,0
-c17,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c18,0,324.065467,0,229.498883,151.959867,0,705.524217,0
-c19,0,324.065467,0,0,151.959867,0,476.025334,0
-c20,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c21,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c22,0,0,175.390266,0,151.959867,1.216143,328.566276,0
-c23,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c24,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c25,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c26,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c27,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c28,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c29,0,324.065467,175.390266,229.498883,151.959867,0,880.914483,0
-c30,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c31,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c32,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c33,0,324.065467,0,229.498883,151.959867,1.216143,706.74036,0
-c34,0,324.065467,0,229.498883,151.959867,1.216143,706.74036,0
-c35,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c36,0,324.065467,0,229.498883,151.959867,1.216143,706.74036,0
-c37,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c38,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c39,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c40,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c41,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c42,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c43,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c44,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c45,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c46,0,324.065467,0,229.498883,151.959867,1.216143,706.74036,0
-c47,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c48,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c49,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c50,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c51,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c2,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c3,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c4,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c5,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c6,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c7,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c8,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c9,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c10,0,0,44.260116,72.144097,43.122319,0,159.526532,0
-c11,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c12,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c13,0,78.347264,0,72.144097,43.122319,3.321408,196.935088,0
-c14,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c15,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c16,0,78.347264,44.260116,0,43.122319,0,165.729699,0
-c17,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c18,0,78.347264,0,72.144097,43.122319,0,193.61368,0
-c19,0,78.347264,0,0,43.122319,0,121.469583,0
-c20,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c21,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c22,0,0,44.260116,0,43.122319,3.321408,90.703843,0
-c23,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c24,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c25,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c26,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c27,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c28,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c29,0,78.347264,44.260116,72.144097,43.122319,0,237.873796,0
-c30,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c31,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c32,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c33,0,78.347264,0,72.144097,43.122319,3.321408,196.935088,0
-c34,0,78.347264,0,72.144097,43.122319,3.321408,196.935088,0
-c35,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c36,0,78.347264,0,72.144097,43.122319,3.321408,196.935088,0
-c37,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c38,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c39,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c40,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c41,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c42,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c43,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c44,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c45,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c46,0,78.347264,0,72.144097,43.122319,3.321408,196.935088,0
-c47,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c48,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c49,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c50,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c51,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c2,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c3,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c4,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c5,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c6,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c7,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c8,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c9,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c10,0,0,19.907558,29.286620,18.163317,0,67.357495,0
-c11,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c12,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c13,0,36.608647,0,29.286620,18.163317,0.701660,84.760244,0
-c14,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c15,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c16,0,36.608647,19.907558,0,18.163317,0,74.679522,0
-c17,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c18,0,36.608647,0,29.286620,18.163317,0,84.058584,0
-c19,0,36.608647,0,0,18.163317,0,54.771964,0
-c20,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c21,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c22,0,0,19.907558,0,18.163317,0.701660,38.772535,0
-c23,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c24,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c25,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c26,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c27,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c28,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c29,0,36.608647,19.907558,29.286620,18.163317,0,103.966142,0
-c30,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c31,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c32,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c33,0,36.608647,0,29.286620,18.163317,0.701660,84.760244,0
-c34,0,36.608647,0,29.286620,18.163317,0.701660,84.760244,0
-c35,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c36,0,36.608647,0,29.286620,18.163317,0.701660,84.760244,0
-c37,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c38,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c39,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c40,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c41,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c42,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c43,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c44,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c45,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c46,0,36.608647,0,29.286620,18.163317,0.701660,84.760244,0
-c47,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c48,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c49,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c50,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c51,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,6.60174,0,0,0,0,0,6.60174,0
-c2,6.60174,0,0,0,0,0,6.60174,0
-c3,6.60174,0,0,0,0,0,6.60174,0
-c4,0,0,0,0,0,0,0,0
-c5,6.60174,0,0,0,0,0,6.60174,0
-c6,0,0,0,0,0,0,0,0
-c7,6.60174,0,0,0,0,0,6.60174,0
-c8,0,0,0,0,0,0,0,0
-c9,6.60174,0,0,0,0,0,6.60174,0
-c10,6.60174,473.091,0,0,0,0,479.69274,0
-c11,6.60174,0,0,0,0,0,6.60174,0
-c12,6.60174,0,0,0,0,0,6.60174,0
-c13,6.60174,0,0,0,0,0,6.60174,0
-c14,6.60174,0,0,0,0,0,6.60174,0
-c15,0,0,0,0,0,0,0,0
-c16,6.60174,0,0,0,0,0,6.60174,0
-c17,6.60174,0,0,0,0,0,6.60174,0
-c18,6.60174,0,0,0,0,0,6.60174,0
-c19,0,0,0,157.506,0,0,157.506,0
-c20,6.60174,0,0,0,0,0,6.60174,0
-c21,6.60174,0,0,0,0,0,6.60174,0
-c22,6.60174,473.091,0,0,0,0,479.69274,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,6.60174,0,0,0,0,0,6.60174,0
-c27,6.60174,0,0,0,0,0,6.60174,0
-c28,6.60174,0,0,0,0,0,6.60174,0
-c29,0,0,0,0,0,0,0,0
-c30,6.60174,0,0,0,0,0,6.60174,0
-c31,6.60174,0,0,0,0,0,6.60174,0
-c32,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0
-c34,6.60174,0,0,0,0,0,6.60174,0
-c35,0,0,0,0,0,0,0,0
-c36,6.60174,0,0,0,0,0,6.60174,0
-c37,6.60174,0,0,0,0,0,6.60174,0
-c38,6.60174,0,0,0,0,0,6.60174,0
-c39,6.60174,0,0,0,0,0,6.60174,0
-c40,6.60174,0,0,0,0,0,6.60174,0
-c41,6.60174,0,0,0,0,0,6.60174,0
-c42,6.60174,0,0,0,0,0,6.60174,0
-c43,6.60174,0,0,0,0,0,6.60174,0
-c44,6.60174,0,0,0,0,0,6.60174,0
-c45,6.60174,0,0,0,0,0,6.60174,0
-c46,0,0,0,0,0,0,0,0
-c47,6.60174,0,0,0,0,0,6.60174,0
-c48,6.60174,0,0,0,0,0,6.60174,0
-c49,6.60174,0,0,0,0,0,6.60174,0
-c50,0,0,0,0,0,0,0,0
-c51,6.60174,0,0,0,0,0,6.60174,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,5.44249,0,0,0,0,0,5.44249,0
-c2,5.44249,0,0,0,0,0,5.44249,0
-c3,5.44249,0,0,0,0,0,5.44249,0
-c4,0,0,0,0,0,0,0,0
-c5,5.44249,0,0,0,0,0,5.44249,0
-c6,0,0,0,0,0,0,0,0
-c7,5.44249,0,0,0,0,0,5.44249,0
-c8,0,0,0,0,0,0,0,0
-c9,5.44249,0,0,0,0,0,5.44249,0
-c10,5.44249,62.1589,0,0,0,0,67.60139,0
-c11,5.44249,0,0,0,0,0,5.44249,0
-c12,5.44249,0,0,0,0,0,5.44249,0
-c13,5.44249,0,0,0,0,0,5.44249,0
-c14,5.44249,0,0,0,0,0,5.44249,0
-c15,0,0,0,0,0,0,0,0
-c16,5.44249,0,0,0,0,0,5.44249,0
-c17,5.44249,0,0,0,0,0,5.44249,0
-c18,5.44249,0,0,0,0,0,5.44249,0
-c19,0,0,0,20.805,0,0,20.805,0
-c20,5.44249,0,0,0,0,0,5.44249,0
-c21,5.44249,0,0,0,0,0,5.44249,0
-c22,5.44249,62.1589,0,0,0,0,67.60139,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,5.44249,0,0,0,0,0,5.44249,0
-c27,5.44249,0,0,0,0,0,5.44249,0
-c28,5.44249,0,0,0,0,0,5.44249,0
-c29,0,0,0,0,0,0,0,0
-c30,5.44249,0,0,0,0,0,5.44249,0
-c31,5.44249,0,0,0,0,0,5.44249,0
-c32,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0
-c34,5.44249,0,0,0,0,0,5.44249,0
-c35,0,0,0,0,0,0,0,0
-c36,5.44249,0,0,0,0,0,5.44249,0
-c37,5.44249,0,0,0,0,0,5.44249,0
-c38,5.44249,0,0,0,0,0,5.44249,0
-c39,5.44249,0,0,0,0,0,5.44249,0
-c40,5.44249,0,0,0,0,0,5.44249,0
-c41,5.44249,0,0,0,0,0,5.44249,0
-c42,5.44249,0,0,0,0,0,5.44249,0
-c43,5.44249,0,0,0,0,0,5.44249,0
-c44,5.44249,0,0,0,0,0,5.44249,0
-c45,5.44249,0,0,0,0,0,5.44249,0
-c46,0,0,0,0,0,0,0,0
-c47,5.44249,0,0,0,0,0,5.44249,0
-c48,5.44249,0,0,0,0,0,5.44249,0
-c49,5.44249,0,0,0,0,0,5.44249,0
-c50,0,0,0,0,0,0,0,0
-c51,5.44249,0,0,0,0,0,5.44249,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c2,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c3,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c4,1291.7259,218.942215,116.09111,1293.3931,100.738165,0.79176,3021.68225,2.344807431529
-c5,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c6,1291.7259,218.942215,116.09111,157.531356,100.738165,3.90263,1888.931376,3.75093714092993
-c7,1007.11399,218.942215,116.09111,157.531356,100.738165,3.90263,1604.319466,4.41636652706634
-c8,1291.7259,218.942215,116.09111,157.531356,100.738165,3.90263,1888.931376,3.75093714092993
-c9,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c10,1007.11399,2469.6607,116.09111,157.531356,100.738165,3.90263,3855.037951,1.83792303377189
-c11,1007.11399,218.942215,116.09111,157.531356,100.738165,7.430588,1607.847424,4.40667608354634
-c12,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c13,1007.11399,218.942215,653.4248,157.531356,100.738165,0.79176,2138.542286,3.31312733214164
-c14,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c15,1291.7259,218.942215,116.09111,157.531356,100.738165,7.430588,1892.459334,3.74394457429622
-c16,1007.11399,218.942215,116.09111,1293.3931,100.738165,7.430588,2743.709168,2.58236662048553
-c17,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c18,1007.11399,218.942215,653.4248,157.531356,100.738165,7.430588,2145.181114,3.30287398740944
-c19,1291.7259,218.942215,653.4248,1314.1981,100.738165,7.430588,3586.459768,1.97555904451013
-c20,1007.11399,218.942215,116.09111,157.531356,100.738165,3.90263,1604.319466,4.41636652706634
-c21,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c22,1007.11399,2469.6607,116.09111,686.8162,100.738165,0.79176,4381.211925,1.61719250051588
-c23,1291.7259,218.942215,116.09111,157.531356,972.3579,0.79176,2757.440241,2.5695073523986
-c24,1291.7259,218.942215,116.09111,686.8162,100.738165,0.79176,2415.10535,2.93372830987566
-c25,1291.7259,218.942215,116.09111,157.531356,100.738165,0.79176,1885.820506,3.75712472727112
-c26,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c27,1007.11399,218.942215,116.09111,1293.3931,100.738165,0.79176,2737.07034,2.58863021077382
-c28,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c29,1291.7259,218.942215,116.09111,157.531356,100.738165,7.430588,1892.459334,3.74394457429622
-c30,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c31,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c32,1291.7259,218.942215,116.09111,157.531356,100.738165,0.79176,1885.820506,3.75712472727112
-c33,1291.7259,218.942215,1116.3819,157.531356,100.738165,0.79176,2886.111296,2.45495140617919
-c34,1007.11399,218.942215,1116.3819,157.531356,100.738165,0.79176,2601.499386,2.723530513125
-c35,1291.7259,218.942215,116.09111,157.531356,100.738165,0.79176,1885.820506,3.75712472727112
-c36,1007.11399,218.942215,1116.3819,157.531356,100.738165,0.79176,2601.499386,2.723530513125
-c37,1007.11399,218.942215,116.09111,157.531356,972.3579,0.79176,2472.828331,2.86524659017074
-c38,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c39,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c40,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c41,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c42,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c43,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c44,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c45,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c46,1291.7259,218.942215,1116.3819,157.531356,100.738165,0.79176,2886.111296,2.45495140617919
-c47,1007.11399,218.942215,116.09111,157.531356,972.3579,0.79176,2472.828331,2.86524659017074
-c48,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c49,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c50,1291.7259,218.942215,116.09111,1293.3931,100.738165,0.79176,3021.68225,2.344807431529
-c51,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c1,1601.208596
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0
-c0,0
-
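In every block of these deleted result tables, the Improvement column is the baseline c0 Total divided by that row's Total (up to rounding of the stored totals), e.g. 7085.26323 / 1601.208596 ≈ 4.42494675909504 for c1 of the Time block above; for the overhead blocks whose c0 row is all zeros, the column is simply 0. A minimal Python sketch of that relationship, assuming the CSV layout shown above — the file path and helper name are illustrative, not part of the repo:

    # Minimal sketch: recompute the Improvement column of one table block,
    # assuming the CSV layout of the deleted result files above.
    # The path and helper name below are illustrative, not from the repo.
    def read_block(path, title):
        """Return {config: Total} for the table block that starts at `title`."""
        with open(path) as f:
            lines = [ln.strip() for ln in f]
        totals = {}
        for ln in lines[lines.index(title) + 2:]:   # skip the title and header rows
            cells = ln.split(',')
            if not ln or len(cells) < 3:            # blank line or trailing summary row
                break
            totals[cells[0]] = float(cells[-2])     # Total is the next-to-last column
        return totals

    totals = read_block('alexnet_results.csv', 'Time')   # hypothetical path
    baseline = totals['c0']
    for cfg, total in totals.items():
        # e.g. 7085.26323 / 1601.208596 ~= 4.4249 for c1 in the Time block above
        print(cfg, baseline / total if total else 0.0)
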
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results1.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results1.csv
deleted file mode 100644
index ca505f7f4aef79e5d466ac7e797f2040a8af1225..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results1.csv
+++ /dev/null
@@ -1,385 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c2,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c3,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c4,2545.253,255.701869,183.299064,4576.494,157.113483,0.174571,7718.035987,6.08874133500796
-c5,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c6,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c7,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c8,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c9,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c10,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c11,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c12,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c13,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c14,2545.253,255.701869,137.685622,177.024371,157.113483,0.174571,3272.952916,14.3580201482489
-c15,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c16,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c17,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c18,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c19,2545.253,255.701869,183.299064,4576.494,157.113483,0.174571,7718.035987,6.08874133500796
-c20,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c21,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c22,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c23,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c24,2545.253,255.701869,137.685622,4576.494,157.113483,0.174571,7672.422545,6.12493960804215
-c25,2545.253,255.701869,183.299064,4576.494,157.113483,0.174571,7718.035987,6.08874133500796
-c26,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c27,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c28,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c29,2545.253,255.701869,3726.01,4576.494,157.113483,0.174571,11260.746923,4.17318009649066
-c30,2545.253,340.412547,183.299064,4576.494,3304.362,0.174571,10949.995182,4.29161146993817
-c14,3272.952916
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c2,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c3,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c4,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c5,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c6,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c7,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c8,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c9,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c10,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c11,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c12,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c13,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c14,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c15,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c16,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c17,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c18,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c19,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c20,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c21,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c22,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c23,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c24,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c25,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c26,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c27,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c28,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c29,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c30,1001.6715,182.333568,96.183552,686.8162,510.1917,0.090100,2477.28662,2.86009010293326
-c14,1491.098304
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c2,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c3,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c4,2551.85474,658.1146,402.949446,4576.494,352.195669,4.712122,8546.320577,5.4986382004678
-c5,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c6,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c7,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c8,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c9,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c10,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c11,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c12,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c13,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c14,2551.85474,658.1146,357.336004,478.667351,352.195669,4.712122,4402.880486,10.6732682002382
-c15,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c16,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c17,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c18,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c19,2551.85474,658.1146,402.949446,4576.494,352.195669,4.712122,8546.320577,5.4986382004678
-c20,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c21,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c22,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c23,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c24,2551.85474,658.1146,357.336004,4576.494,352.195669,4.712122,8500.707135,5.52814301785562
-c25,2551.85474,658.1146,402.949446,4576.494,352.195669,4.712122,8546.320577,5.4986382004678
-c26,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c27,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c28,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c29,2551.85474,658.1146,3726.01,4576.494,352.195669,4.712122,11869.381131,3.95918914671517
-c30,2551.85474,742.825278,402.949446,4576.494,3304.362,4.712122,11583.197586,4.05700797153778
-c14,4402.880486
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c2,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c3,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c4,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c5,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c6,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c7,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c8,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c9,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c10,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c11,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c12,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c13,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c14,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c15,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c16,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c17,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c18,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c19,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c20,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c21,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c22,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c23,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c24,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c25,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c26,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c27,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c28,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c29,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c30,0,324.065467,175.390266,0,0,1.216143,500.671876,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c2,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c3,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c4,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c5,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c6,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c7,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c8,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c9,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c10,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c11,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c12,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c13,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c14,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c15,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c16,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c17,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c18,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c19,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c20,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c21,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c22,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c23,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c24,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c25,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c26,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c27,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c28,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c29,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c30,0,78.347264,44.260116,0,0,3.321408,125.928788,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c2,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c3,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c4,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c5,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c6,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c7,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c8,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c9,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c10,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c11,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c12,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c13,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c14,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c15,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c16,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c17,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c18,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c19,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c20,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c21,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c22,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c23,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c24,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c25,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c26,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c27,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c28,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c29,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c30,0,36.608647,19.907558,0,0,0.701660,57.217865,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,6.60174,0,0,0,0,0,6.60174,0
-c2,6.60174,0,0,0,0,0,6.60174,0
-c3,6.60174,0,0,0,0,0,6.60174,0
-c4,6.60174,0,0,0,0,0,6.60174,0
-c5,6.60174,0,0,0,0,0,6.60174,0
-c6,6.60174,0,0,0,0,0,6.60174,0
-c7,6.60174,0,0,0,0,0,6.60174,0
-c8,6.60174,0,0,0,0,0,6.60174,0
-c9,6.60174,0,0,0,0,0,6.60174,0
-c10,6.60174,0,0,0,0,0,6.60174,0
-c11,6.60174,0,0,0,0,0,6.60174,0
-c12,6.60174,0,0,0,0,0,6.60174,0
-c13,6.60174,0,0,0,0,0,6.60174,0
-c14,6.60174,0,0,0,0,0,6.60174,0
-c15,6.60174,0,0,0,0,0,6.60174,0
-c16,6.60174,0,0,0,0,0,6.60174,0
-c17,6.60174,0,0,0,0,0,6.60174,0
-c18,6.60174,0,0,0,0,0,6.60174,0
-c19,6.60174,0,0,0,0,0,6.60174,0
-c20,6.60174,0,0,0,0,0,6.60174,0
-c21,6.60174,0,0,0,0,0,6.60174,0
-c22,6.60174,0,0,0,0,0,6.60174,0
-c23,6.60174,0,0,0,0,0,6.60174,0
-c24,6.60174,0,0,0,0,0,6.60174,0
-c25,6.60174,0,0,0,0,0,6.60174,0
-c26,6.60174,0,0,0,0,0,6.60174,0
-c27,6.60174,0,0,0,0,0,6.60174,0
-c28,6.60174,0,0,0,0,0,6.60174,0
-c29,6.60174,0,0,0,0,0,6.60174,0
-c30,6.60174,0,0,0,0,0,6.60174,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,5.44249,0,0,0,0,0,5.44249,0
-c2,5.44249,0,0,0,0,0,5.44249,0
-c3,5.44249,0,0,0,0,0,5.44249,0
-c4,5.44249,0,0,0,0,0,5.44249,0
-c5,5.44249,0,0,0,0,0,5.44249,0
-c6,5.44249,0,0,0,0,0,5.44249,0
-c7,5.44249,0,0,0,0,0,5.44249,0
-c8,5.44249,0,0,0,0,0,5.44249,0
-c9,5.44249,0,0,0,0,0,5.44249,0
-c10,5.44249,0,0,0,0,0,5.44249,0
-c11,5.44249,0,0,0,0,0,5.44249,0
-c12,5.44249,0,0,0,0,0,5.44249,0
-c13,5.44249,0,0,0,0,0,5.44249,0
-c14,5.44249,0,0,0,0,0,5.44249,0
-c15,5.44249,0,0,0,0,0,5.44249,0
-c16,5.44249,0,0,0,0,0,5.44249,0
-c17,5.44249,0,0,0,0,0,5.44249,0
-c18,5.44249,0,0,0,0,0,5.44249,0
-c19,5.44249,0,0,0,0,0,5.44249,0
-c20,5.44249,0,0,0,0,0,5.44249,0
-c21,5.44249,0,0,0,0,0,5.44249,0
-c22,5.44249,0,0,0,0,0,5.44249,0
-c23,5.44249,0,0,0,0,0,5.44249,0
-c24,5.44249,0,0,0,0,0,5.44249,0
-c25,5.44249,0,0,0,0,0,5.44249,0
-c26,5.44249,0,0,0,0,0,5.44249,0
-c27,5.44249,0,0,0,0,0,5.44249,0
-c28,5.44249,0,0,0,0,0,5.44249,0
-c29,5.44249,0,0,0,0,0,5.44249,0
-c30,5.44249,0,0,0,0,0,5.44249,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c2,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c3,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c4,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c5,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c6,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c7,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c8,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c9,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c10,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c11,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c12,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c13,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c14,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c15,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c16,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c17,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c18,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c19,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c20,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c21,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c22,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c23,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c24,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c25,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c26,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c27,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c28,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c29,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c30,1007.11399,218.942215,116.09111,686.8162,510.1917,0.79176,2539.946975,2.78953183699704
-c14,1601.208596
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c0,0
-
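The single short row that closes each block records the configuration with the minimum Total in that block — "c14,3272.952916" closes the Compute Energy block of alexnet_results1.csv above, and the all-zero overhead blocks consistently close with "c0,0". A self-contained sketch of that selection, with a few totals copied from that Compute Energy block for illustration:

    # The trailing one-column row of each block (e.g. "c14,3272.952916" above)
    # matches the configuration with the minimum Total in that block.
    # Totals copied from the Compute Energy block of alexnet_results1.csv:
    totals = {'c0': 46993.125348, 'c1': 7672.422545, 'c3': 11260.746923,
              'c4': 7718.035987, 'c14': 3272.952916, 'c30': 10949.995182}
    best = min(totals, key=totals.get)
    assert (best, totals[best]) == ('c14', 3272.952916)
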
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2.csv
deleted file mode 100644
index a2baf6209cb3a101a9d8f3b423c209e73f1d89cf..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2.csv
+++ /dev/null
@@ -1,385 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2545.253,7465.49,3726.01,4576.494,3304.362,0.174571,21617.783571,2.17381791136344
-c2,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c3,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c4,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c5,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c6,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c7,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c8,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c9,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c10,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c11,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c12,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c13,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c14,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c15,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c16,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c17,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c18,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c19,2545.253,150.857135,3726.01,4576.494,157.113483,0.174571,11155.902189,4.21240022820354
-c20,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c21,2545.253,7465.49,3726.01,4576.494,3304.362,0.174571,21617.783571,2.17381791136344
-c22,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c23,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c24,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c25,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c26,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c27,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c28,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c29,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c30,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c4,3024.166212
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1001.6715,1366.6231,653.4248,686.8162,510.1917,0.090100,4218.8174,1.67944293157976
-c2,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c3,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c4,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c5,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c6,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c7,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c8,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c9,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c10,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c11,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c12,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c13,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c14,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c15,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c16,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c17,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c18,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c19,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c20,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c21,1001.6715,1366.6231,653.4248,686.8162,510.1917,0.090100,4218.8174,1.67944293157976
-c22,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c23,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c24,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c25,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c26,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c27,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c28,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c29,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c30,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c4,1491.098304
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2551.85474,7465.49,3726.01,4576.494,3304.362,4.712122,21628.922862,2.17269835537176
-c2,2551.85474,742.825278,300.881147,478.667351,3304.362,4.712122,7383.302638,6.36478375810573
-c3,2551.85474,7465.49,402.949446,406.082535,313.098433,4.712122,11144.187276,4.21682835746318
-c4,2551.85474,553.269866,300.881147,478.667351,264.708556,4.712122,4154.093782,11.3124851490778
-c5,2551.85474,553.269866,300.881147,537.313205,352.195669,4.712122,4300.226749,10.9280572858448
-c6,2551.85474,553.269866,300.881147,4576.494,272.76268,4.712122,8259.974555,5.68925781383042
-c7,2551.85474,742.825278,402.949446,537.313205,3304.362,4.712122,7544.016791,6.22919142772105
-c8,2551.85474,742.825278,310.277625,537.313205,264.708556,4.712122,4411.691526,10.6519515260426
-c9,2551.85474,7465.49,3726.01,478.667351,264.708556,4.712122,14491.442769,3.24281893616869
-c10,2551.85474,7465.49,300.881147,478.667351,272.76268,4.712122,11074.36804,4.2434136877086
-c11,2551.85474,742.825278,300.881147,478.667351,3304.362,4.712122,7383.302638,6.36478375810573
-c12,2551.85474,7465.49,402.949446,406.082535,313.098433,4.712122,11144.187276,4.21682835746318
-c13,2551.85474,553.269866,300.881147,478.667351,264.708556,4.712122,4154.093782,11.3124851490778
-c14,2551.85474,553.269866,300.881147,537.313205,352.195669,4.712122,4300.226749,10.9280572858448
-c15,2551.85474,553.269866,300.881147,4576.494,272.76268,4.712122,8259.974555,5.68925781383042
-c16,2551.85474,742.825278,402.949446,537.313205,3304.362,4.712122,7544.016791,6.22919142772105
-c17,2551.85474,742.825278,310.277625,537.313205,264.708556,4.712122,4411.691526,10.6519515260426
-c18,2551.85474,7465.49,3726.01,478.667351,264.708556,4.712122,14491.442769,3.24281893616869
-c19,2551.85474,553.269866,3726.01,4576.494,352.195669,4.712122,11764.536397,3.994473166026
-c20,2551.85474,7465.49,300.881147,478.667351,272.76268,4.712122,11074.36804,4.2434136877086
-c21,2551.85474,7465.49,3726.01,4576.494,3304.362,4.712122,21628.922862,2.17269835537176
-c22,2551.85474,742.825278,300.881147,478.667351,3304.362,4.712122,7383.302638,6.36478375810573
-c23,2551.85474,7465.49,300.881147,478.667351,272.76268,4.712122,11074.36804,4.2434136877086
-c24,2551.85474,553.269866,300.881147,478.667351,264.708556,4.712122,4154.093782,11.3124851490778
-c25,2551.85474,742.825278,310.277625,537.313205,264.708556,4.712122,4411.691526,10.6519515260426
-c26,2551.85474,742.825278,402.949446,537.313205,3304.362,4.712122,7544.016791,6.22919142772105
-c27,2551.85474,7465.49,3726.01,478.667351,264.708556,4.712122,14491.442769,3.24281893616869
-c28,2551.85474,7465.49,402.949446,406.082535,313.098433,4.712122,11144.187276,4.21682835746318
-c29,2551.85474,553.269866,300.881147,4576.494,272.76268,4.712122,8259.974555,5.68925781383042
-c30,2551.85474,553.269866,300.881147,537.313205,352.195669,4.712122,4300.226749,10.9280572858448
-c4,4154.093782
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,1.216143,1.216143,0
-c2,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c3,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c4,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c5,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c6,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c7,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c8,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c9,0,0,0,229.498883,151.959867,1.216143,382.674893,0
-c10,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c11,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c12,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c13,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c14,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c15,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c16,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c17,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c18,0,0,0,229.498883,151.959867,1.216143,382.674893,0
-c19,0,324.065467,0,0,151.959867,1.216143,477.241477,0
-c20,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c21,0,0,0,0,0,1.216143,1.216143,0
-c22,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c23,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c24,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c25,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c26,0,324.065467,175.390266,229.498883,0,1.216143,730.170759,0
-c27,0,0,0,229.498883,151.959867,1.216143,382.674893,0
-c28,0,0,175.390266,229.498883,151.959867,1.216143,558.065159,0
-c29,0,324.065467,175.390266,0,151.959867,1.216143,652.631743,0
-c30,0,324.065467,175.390266,229.498883,151.959867,1.216143,882.130626,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,3.321408,3.321408,0
-c2,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c3,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c4,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c5,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c6,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c7,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c8,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c9,0,0,0,72.144097,43.122319,3.321408,118.587824,0
-c10,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c11,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c12,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c13,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c14,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c15,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c16,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c17,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c18,0,0,0,72.144097,43.122319,3.321408,118.587824,0
-c19,0,78.347264,0,0,43.122319,3.321408,124.790991,0
-c20,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c21,0,0,0,0,0,3.321408,3.321408,0
-c22,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c23,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c24,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c25,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c26,0,78.347264,44.260116,72.144097,0,3.321408,198.072885,0
-c27,0,0,0,72.144097,43.122319,3.321408,118.587824,0
-c28,0,0,44.260116,72.144097,43.122319,3.321408,162.84794,0
-c29,0,78.347264,44.260116,0,43.122319,3.321408,169.051107,0
-c30,0,78.347264,44.260116,72.144097,43.122319,3.321408,241.195204,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0.701660,0.70166,0
-c2,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c3,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c4,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c5,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c6,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c7,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c8,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c9,0,0,0,29.286620,18.163317,0.701660,48.151597,0
-c10,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c11,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c12,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c13,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c14,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c15,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c16,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c17,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c18,0,0,0,29.286620,18.163317,0.701660,48.151597,0
-c19,0,36.608647,0,0,18.163317,0.701660,55.473624,0
-c20,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c21,0,0,0,0,0,0.701660,0.70166,0
-c22,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c23,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c24,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c25,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c26,0,36.608647,19.907558,29.286620,0,0.701660,86.504485,0
-c27,0,0,0,29.286620,18.163317,0.701660,48.151597,0
-c28,0,0,19.907558,29.286620,18.163317,0.701660,68.059155,0
-c29,0,36.608647,19.907558,0,18.163317,0.701660,75.381182,0
-c30,0,36.608647,19.907558,29.286620,18.163317,0.701660,104.667802,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,6.60174,0,0,0,0,0,6.60174,0
-c2,6.60174,0,0,0,0,0,6.60174,0
-c3,6.60174,0,0,0,0,0,6.60174,0
-c4,6.60174,0,0,0,0,0,6.60174,0
-c5,6.60174,0,0,0,0,0,6.60174,0
-c6,6.60174,0,0,0,0,0,6.60174,0
-c7,6.60174,0,0,0,0,0,6.60174,0
-c8,6.60174,0,0,0,0,0,6.60174,0
-c9,6.60174,0,0,0,0,0,6.60174,0
-c10,6.60174,0,0,0,0,0,6.60174,0
-c11,6.60174,0,0,0,0,0,6.60174,0
-c12,6.60174,0,0,0,0,0,6.60174,0
-c13,6.60174,0,0,0,0,0,6.60174,0
-c14,6.60174,0,0,0,0,0,6.60174,0
-c15,6.60174,0,0,0,0,0,6.60174,0
-c16,6.60174,0,0,0,0,0,6.60174,0
-c17,6.60174,0,0,0,0,0,6.60174,0
-c18,6.60174,0,0,0,0,0,6.60174,0
-c19,6.60174,0,0,0,0,0,6.60174,0
-c20,6.60174,0,0,0,0,0,6.60174,0
-c21,6.60174,0,0,0,0,0,6.60174,0
-c22,6.60174,0,0,0,0,0,6.60174,0
-c23,6.60174,0,0,0,0,0,6.60174,0
-c24,6.60174,0,0,0,0,0,6.60174,0
-c25,6.60174,0,0,0,0,0,6.60174,0
-c26,6.60174,0,0,0,0,0,6.60174,0
-c27,6.60174,0,0,0,0,0,6.60174,0
-c28,6.60174,0,0,0,0,0,6.60174,0
-c29,6.60174,0,0,0,0,0,6.60174,0
-c30,6.60174,0,0,0,0,0,6.60174,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,5.44249,0,0,0,0,0,5.44249,0
-c2,5.44249,0,0,0,0,0,5.44249,0
-c3,5.44249,0,0,0,0,0,5.44249,0
-c4,5.44249,0,0,0,0,0,5.44249,0
-c5,5.44249,0,0,0,0,0,5.44249,0
-c6,5.44249,0,0,0,0,0,5.44249,0
-c7,5.44249,0,0,0,0,0,5.44249,0
-c8,5.44249,0,0,0,0,0,5.44249,0
-c9,5.44249,0,0,0,0,0,5.44249,0
-c10,5.44249,0,0,0,0,0,5.44249,0
-c11,5.44249,0,0,0,0,0,5.44249,0
-c12,5.44249,0,0,0,0,0,5.44249,0
-c13,5.44249,0,0,0,0,0,5.44249,0
-c14,5.44249,0,0,0,0,0,5.44249,0
-c15,5.44249,0,0,0,0,0,5.44249,0
-c16,5.44249,0,0,0,0,0,5.44249,0
-c17,5.44249,0,0,0,0,0,5.44249,0
-c18,5.44249,0,0,0,0,0,5.44249,0
-c19,5.44249,0,0,0,0,0,5.44249,0
-c20,5.44249,0,0,0,0,0,5.44249,0
-c21,5.44249,0,0,0,0,0,5.44249,0
-c22,5.44249,0,0,0,0,0,5.44249,0
-c23,5.44249,0,0,0,0,0,5.44249,0
-c24,5.44249,0,0,0,0,0,5.44249,0
-c25,5.44249,0,0,0,0,0,5.44249,0
-c26,5.44249,0,0,0,0,0,5.44249,0
-c27,5.44249,0,0,0,0,0,5.44249,0
-c28,5.44249,0,0,0,0,0,5.44249,0
-c29,5.44249,0,0,0,0,0,5.44249,0
-c30,5.44249,0,0,0,0,0,5.44249,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1007.11399,1366.6231,653.4248,686.8162,510.1917,0.79176,4224.96155,1.67700060188712
-c2,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c3,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c4,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c5,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c6,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c7,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c8,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c9,1007.11399,1366.6231,653.4248,157.531356,100.738165,0.79176,3286.223171,2.15605047061941
-c10,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c11,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c12,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c13,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c14,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c15,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c16,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c17,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c18,1007.11399,1366.6231,653.4248,157.531356,100.738165,0.79176,3286.223171,2.15605047061941
-c19,1007.11399,218.942215,653.4248,686.8162,100.738165,0.79176,2667.82713,2.65581786943527
-c20,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c21,1007.11399,1366.6231,653.4248,686.8162,510.1917,0.79176,4224.96155,1.67700060188712
-c22,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c23,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c24,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c25,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c26,1007.11399,218.942215,116.09111,157.531356,510.1917,0.79176,2010.662131,3.52384558717063
-c27,1007.11399,1366.6231,653.4248,157.531356,100.738165,0.79176,3286.223171,2.15605047061941
-c28,1007.11399,1366.6231,116.09111,157.531356,100.738165,0.79176,2748.889481,2.57750012185739
-c29,1007.11399,218.942215,116.09111,686.8162,100.738165,0.79176,2130.49344,3.32564408057299
-c30,1007.11399,218.942215,116.09111,157.531356,100.738165,0.79176,1601.208596,4.42494675909504
-c4,1601.208596
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2_nodma.csv b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2_nodma.csv
deleted file mode 100644
index bdc6580f750a3f05adaa9913814b04efb45265bb..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_results2_nodma.csv
+++ /dev/null
@@ -1,385 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2545.253,7465.49,3726.01,4576.494,3304.362,0.174571,21617.783571,2.17381791136344
-c2,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c3,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c4,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c5,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c6,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c7,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c8,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c9,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c10,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c11,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c12,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c13,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c14,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c15,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c16,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c17,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c18,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c19,2545.253,150.857135,3726.01,4576.494,157.113483,0.174571,11155.902189,4.21240022820354
-c20,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c21,2545.253,7465.49,3726.01,4576.494,3304.362,0.174571,21617.783571,2.17381791136344
-c22,2545.253,340.412547,81.230765,177.024371,3304.362,0.174571,6448.457254,7.28749881843446
-c23,2545.253,7465.49,81.230765,177.024371,77.680494,0.174571,10346.853201,4.5417794164974
-c24,2545.253,150.857135,81.230765,177.024371,69.626370,0.174571,3024.166212,15.5392000636769
-c25,2545.253,340.412547,90.627243,235.670225,69.626370,0.174571,3281.763956,14.3194710363419
-c26,2545.253,340.412547,183.299064,235.670225,3304.362,0.174571,6609.171407,7.11028989007593
-c27,2545.253,7465.49,3726.01,177.024371,69.626370,0.174571,13983.578312,3.36059368807006
-c28,2545.253,7465.49,183.299064,104.439555,118.016247,0.174571,10416.672437,4.51133749103473
-c29,2545.253,150.857135,81.230765,4576.494,77.680494,0.174571,7431.689965,6.3233429996384
-c30,2545.253,150.857135,81.230765,235.670225,157.113483,0.174571,3170.299179,14.8229303331965
-c4,3024.166212
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1001.6715,1366.6231,653.4248,686.8162,510.1917,0.090100,4218.8174,1.67944293157976
-c2,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c3,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c4,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c5,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c6,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c7,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c8,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c9,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c10,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c11,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c12,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c13,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c14,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c15,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c16,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c17,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c18,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c19,1001.6715,182.333568,653.4248,686.8162,82.574848,0.090100,2606.911016,2.7178767954588
-c20,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c21,1001.6715,1366.6231,653.4248,686.8162,510.1917,0.090100,4218.8174,1.67944293157976
-c22,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c23,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c24,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c25,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c26,1001.6715,182.333568,96.183552,128.244736,510.1917,0.090100,1918.715156,3.69271219783328
-c27,1001.6715,1366.6231,653.4248,128.244736,82.574848,0.090100,3232.629084,2.19179584997523
-c28,1001.6715,1366.6231,96.183552,128.244736,82.574848,0.090100,2675.387836,2.64831246888003
-c29,1001.6715,182.333568,96.183552,686.8162,82.574848,0.090100,2049.669768,3.45678264613099
-c30,1001.6715,182.333568,96.183552,128.244736,82.574848,0.090100,1491.098304,4.7517073393635
-c4,1491.098304
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,5417.213,16332.387,7661.092,10086.008,7470.956,25.469348,46993.125348,0.999999997872029
-c1,2551.85474,7465.49,3726.01,4576.494,3304.362,16.043622,21640.254362,2.1715606639709
-c2,2551.85474,2522.2367884,673.629796,1223.341402,3492.309,20.472922,10483.8446484,4.48243239725311
-c3,2551.85474,7465.49,823.65976,1152.230657,792.0937522,4.712122,12790.0410312,3.6741965773171
-c4,2551.85474,2329.095086,673.100225,1223.479574,742.023533,4.712122,7524.26528,6.24554331548577
-c5,2551.85474,2338.258747,670.3014,1284.450006,831.495355,4.712122,7681.07237,6.11804217855531
-c6,2551.85474,2337.3387909,672.241926,4877.603,829.051566,4.712122,11272.8021449,4.16871726542178
-c7,2551.85474,2528.1343714,772.894637,1280.10994,3492.938,20.926922,10646.8586104,4.41380191343165
-c8,2551.85474,2526.3911951,679.672822,1281.002078,743.825003,4.712122,7787.4579601,6.03446272009798
-c9,2551.85474,7465.49,3726.01,1336.573655,739.8828856,4.712122,15824.5234026,2.96963920210798
-c10,2551.85474,7465.49,722.391922,1221.426035,749.227459,4.712122,12715.102278,3.69585111869085
-c11,2551.85474,2522.7929962,670.017717,1221.431058,3492.164,20.885922,10479.1464332,4.48444204870278
-c12,2551.85474,7465.49,821.940336,1148.492297,789.4342137,4.712122,12781.9237087,3.67652992235912
-c13,2551.85474,2337.0326585,667.777038,1223.584497,741.630288,4.712122,7526.5913435,6.24361315487417
-c14,2551.85474,2337.4752086,671.388474,1280.80088,830.174596,4.712122,7676.4060206,6.12176122650568
-c15,2551.85474,2337.069006,670.215899,4883.073,826.7645851,4.712122,11273.6893521,4.16838919926488
-c16,2551.85474,2523.8232109,770.740344,1280.660301,3492.395,20.932222,10640.4058179,4.41647862972455
-c17,2551.85474,2526.394765,681.279181,1284.204247,743.200746,4.712122,7791.645801,6.03121932709606
-c18,2551.85474,7465.49,3726.01,1336.035191,740.477526,4.712122,15824.579579,2.96962866004979
-c19,2551.85474,2336.5105736,3863.721,4576.494,902.097102,4.712122,14235.3895376,3.30114781149909
-c20,2551.85474,7465.49,718.9195146,1221.884731,748.256968,4.712122,12711.1180756,3.69700955484837
-c21,2551.85474,7465.49,3726.01,4576.494,3304.362,19.515522,21643.726262,2.17121232092945
-c22,2551.85474,2523.938098,669.424811,1219.711329,3491.85,20.798522,10477.5775,4.48511355792774
-c23,2551.85474,7465.49,716.178914,1222.588198,747.1898741,4.712122,12708.0138481,3.69791263528051
-c24,2551.85474,2337.0739621,669.25028,1220.715474,741.1878024,4.712122,7524.7943805,6.24510416460935
-c25,2551.85474,2526.9573985,679.479385,1280.319629,742.5067267,4.712122,7785.8300012,6.03572448116446
-c26,2551.85474,2524.6402554,769.500432,1279.196703,3491.59,20.709222,10637.4913524,4.41768865886116
-c27,2551.85474,7465.49,3726.01,1331.321972,739.463611,4.712122,15818.852445,2.9707037987944
-c28,2551.85474,7465.49,820.2617391,1147.969732,787.5597797,4.712122,12777.8481128,3.67770258069942
-c29,2551.85474,2336.5083139,668.332929,4875.324,821.0412029,4.712122,11257.7733078,4.17428239543715
-c30,2551.85474,2338.0065652,667.220353,1277.851397,826.24176,4.712122,7665.8869372,6.13016146989357
-c4,7524.26528
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,1.216143,1.216143,0
-c2,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c3,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c4,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c5,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c6,0,421.149947,200.145258,0,184.944713,1.216143,807.456061,0
-c7,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c8,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c9,0,0,0,277.244900,184.944713,1.216143,463.405756,0
-c10,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c11,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c12,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c13,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c14,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c15,0,421.149947,200.145258,0,184.944713,1.216143,807.456061,0
-c16,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c17,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c18,0,0,0,277.244900,184.944713,1.216143,463.405756,0
-c19,0,421.149947,0,0,184.944713,1.216143,607.310803,0
-c20,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c21,0,0,0,0,0,1.216143,1.216143,0
-c22,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c23,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c24,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c25,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c26,0,421.149947,200.145258,277.244900,0,1.216143,899.756248,0
-c27,0,0,0,277.244900,184.944713,1.216143,463.405756,0
-c28,0,0,200.145258,277.244900,184.944713,1.216143,663.551014,0
-c29,0,421.149947,200.145258,0,184.944713,1.216143,807.456061,0
-c30,0,421.149947,200.145258,277.244900,184.944713,1.216143,1084.700961,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,3.321408,3.321408,0
-c2,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c3,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c4,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c5,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c6,0,393.288704,122.995476,0,148.102799,3.321408,667.708387,0
-c7,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c8,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c9,0,0,0,229.614817,148.102799,3.321408,381.039024,0
-c10,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c11,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c12,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c13,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c14,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c15,0,393.288704,122.995476,0,148.102799,3.321408,667.708387,0
-c16,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c17,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c18,0,0,0,229.614817,148.102799,3.321408,381.039024,0
-c19,0,393.288704,0,0,148.102799,3.321408,544.712911,0
-c20,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c21,0,0,0,0,0,3.321408,3.321408,0
-c22,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c23,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c24,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c25,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c26,0,393.288704,122.995476,229.614817,0,3.321408,749.220405,0
-c27,0,0,0,229.614817,148.102799,3.321408,381.039024,0
-c28,0,0,122.995476,229.614817,148.102799,3.321408,504.0345,0
-c29,0,393.288704,122.995476,0,148.102799,3.321408,667.708387,0
-c30,0,393.288704,122.995476,229.614817,148.102799,3.321408,897.323204,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0.701660,0.70166,0
-c2,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c3,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c4,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c5,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c6,0,102.144647,36.291558,0,40.008650,0.701660,179.146515,0
-c7,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c8,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c9,0,0,0,62.054620,40.008650,0.701660,102.76493,0
-c10,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c11,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c12,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c13,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c14,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c15,0,102.144647,36.291558,0,40.008650,0.701660,179.146515,0
-c16,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c17,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c18,0,0,0,62.054620,40.008650,0.701660,102.76493,0
-c19,0,102.144647,0,0,40.008650,0.701660,142.854957,0
-c20,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c21,0,0,0,0,0,0.701660,0.70166,0
-c22,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c23,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c24,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c25,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c26,0,102.144647,36.291558,62.054620,0,0.701660,201.192485,0
-c27,0,0,0,62.054620,40.008650,0.701660,102.76493,0
-c28,0,0,36.291558,62.054620,40.008650,0.701660,139.056488,0
-c29,0,102.144647,36.291558,0,40.008650,0.701660,179.146515,0
-c30,0,102.144647,36.291558,62.054620,40.008650,0.701660,241.201135,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,1196.1747904,238.515497,521.684414,0,0,1956.3747014,0
-c3,0,0,236.445362,523.058785,322.8415932,0,1082.3457402,0
-c4,0,1194.5813,238.150626,521.636186,320.960251,0,2275.328363,0
-c5,0,1197.451061,235.307601,523.633564,323.27086,0,2279.663086,0
-c6,0,1197.3483049,237.033027,0,324.10256,0,1758.4838919,0
-c7,0,1198.2328734,235.765639,519.301098,0,0,1953.2996104,0
-c8,0,1199.0089971,235.690245,520.781736,323.136621,0,2278.6175991,0
-c9,0,0,0,520.743567,319.2947036,0,840.0382706,0
-c10,0,0,234.667623,519.775147,320.329953,0,1074.772723,0
-c11,0,1194.9892982,234.868518,519.37237,0,0,1949.2301862,0
-c12,0,0,233.913838,519.200225,320.5362547,0,1073.6503177,0
-c13,0,1197.6845725,232.848339,521.412309,320.835906,0,2272.7811265,0
-c14,0,1196.5545226,236.270075,520.184738,321.883301,0,2274.8926366,0
-c15,0,1197.76962,235.32,0,321.0015791,0,1754.0911991,0
-c16,0,1196.1132129,233.776746,520.229259,0,0,1950.1192179,0
-c17,0,1197.961867,237.070404,523.800605,322.661664,0,2281.49454,0
-c18,0,0,0,520.870003,319.940244,0,840.810247,0
-c19,0,1197.7193876,0,0,320.189107,0,1517.9084946,0
-c20,0,0,232.0296156,520.170043,319.681662,0,1071.8813206,0
-c21,0,0,0,0,0,0,0,0
-c22,0,1198.27,235.041712,517.850241,0,0,1951.161953,0
-c23,0,0,230.770615,520.30101,318.1976681,0,1069.2692931,0
-c24,0,1197.9032761,234.173681,519.397086,320.5284204,0,2272.0024635,0
-c25,0,1198.5120005,234.910708,519.968287,321.7786447,0,2275.1696402,0
-c26,0,1194.4315574,232.473034,519.043561,0,0,1945.9481524,0
-c27,0,0,0,519.285984,318.598729,0,837.884713,0
-c28,0,0,230.8411411,518.47986,318.6336207,0,1067.9546218,0
-c29,0,1198.9265279,233.63753,0,317.9858969,0,1750.5499548,0
-c30,0,1198.0251792,233.028754,517.462755,318.097265,0,2266.6139532,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,6.60174,0,0,0,0,11.3315,17.93324,0
-c2,6.60174,69.6338,0,0,187.947,15.7608,279.94334,0
-c3,6.60174,0,50.47,0,0,0,57.07174,0
-c4,6.60174,70.7919,0,0,0,0,77.39364,0
-c5,6.60174,73.1999,0,0,0,0,79.80164,0
-c6,6.60174,71.3667,0,301.109,75.9741,0,455.05154,0
-c7,6.60174,72.3373,0,0,188.576,16.2148,283.72984,0
-c8,6.60174,69.727,0,0,0,0,76.32874,0
-c9,6.60174,0,0,113.939,0,0,120.54074,0
-c10,6.60174,0,52.3655,0,0,0,58.96724,0
-c11,6.60174,71.6995,0,0,187.802,16.1738,282.27704,0
-c12,6.60174,0,50.6831,0,0,0,57.28484,0
-c13,6.60174,71.5133,0,0,0,0,78.11504,0
-c14,6.60174,72.0479,0,0,0,0,78.64964,0
-c15,6.60174,70.9476,0,306.579,76.848,0,460.97634,0
-c16,6.60174,71.5908,0,0,188.033,16.2201,282.44564,0
-c17,6.60174,69.6907,0,0,0,0,76.29244,0
-c18,6.60174,0,0,113.477,0,0,120.07874,0
-c19,6.60174,71.5704,137.711,0,73.7108,0,289.59394,0
-c20,6.60174,0,51.7873,0,0,0,58.38904,0
-c21,6.60174,0,0,0,0,14.8034,21.40514,0
-c22,6.60174,69.3249,0,0,187.488,16.0864,279.50104,0
-c23,6.60174,0,50.3426,0,0,0,56.94434,0
-c24,6.60174,71.2709,0,0,0,0,77.87264,0
-c25,6.60174,71.1902,0,0,0,0,77.79194,0
-c26,6.60174,71.5955,0,0,187.228,15.9971,281.42234,0
-c27,6.60174,0,0,110.437,0,0,117.03874,0
-c28,6.60174,0,51.6476,0,0,0,58.24934,0
-c29,6.60174,70.621,0,298.83,74.405,0,450.45774,0
-c30,6.60174,71.4056,0,0,0,0,78.00734,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,5.44249,0,0,0,0,0,5.44249,0
-c2,5.44249,0,0,0,84.5332,0,89.97569,0
-c3,5.44249,0,0,0,0,0,5.44249,0
-c4,5.44249,0,0,0,0,0,5.44249,0
-c5,5.44249,0,0,0,0,0,5.44249,0
-c6,5.44249,0,0,126.79,0,0,132.23249,0
-c7,5.44249,0,0,0,84.8673,0,90.30979,0
-c8,5.44249,0,0,0,0,0,5.44249,0
-c9,5.44249,0,0,0,0,0,5.44249,0
-c10,5.44249,0,0,0,0,0,5.44249,0
-c11,5.44249,0,0,0,84.4277,0,89.87019,0
-c12,5.44249,0,0,0,0,0,5.44249,0
-c13,5.44249,0,0,0,0,0,5.44249,0
-c14,5.44249,0,0,0,0,0,5.44249,0
-c15,5.44249,0,0,126.687,0,0,132.12949,0
-c16,5.44249,0,0,0,84.6577,0,90.10019,0
-c17,5.44249,0,0,0,0,0,5.44249,0
-c18,5.44249,0,0,0,0,0,5.44249,0
-c19,5.44249,0,63.5646,0,0,0,69.00709,0
-c20,5.44249,0,0,0,0,0,5.44249,0
-c21,5.44249,0,0,0,0,0,5.44249,0
-c22,5.44249,0,0,0,84.4342,0,89.87669,0
-c23,5.44249,0,0,0,0,0,5.44249,0
-c24,5.44249,0,0,0,0,0,5.44249,0
-c25,5.44249,0,0,0,0,0,5.44249,0
-c26,5.44249,0,0,0,84.637,0,90.07949,0
-c27,5.44249,0,0,0,0,0,5.44249,0
-c28,5.44249,0,0,0,0,0,5.44249,0
-c29,5.44249,0,0,126.691,0,0,132.13349,0
-c30,5.44249,0,0,0,0,0,5.44249,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,1291.7259,2407.5018,1116.3819,1293.3931,972.3579,3.90263,7085.26323,0.999999985886199
-c1,1007.11399,1366.6231,653.4248,686.8162,510.1917,0.79176,4224.96155,1.67700060188712
-c2,1007.11399,284.478215,132.47511,190.299356,594.7249,0.79176,2209.883331,3.20617057470485
-c3,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c4,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c5,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c6,1007.11399,284.478215,132.47511,813.6062,122.583498,0.79176,2361.048773,3.00089647064243
-c7,1007.11399,284.478215,132.47511,190.299356,595.059,0.79176,2210.217431,3.2056859248575
-c8,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c9,1007.11399,1366.6231,653.4248,190.299356,122.583498,0.79176,3340.836504,2.12080507664361
-c10,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c11,1007.11399,284.478215,132.47511,190.299356,594.6194,0.79176,2209.777831,3.20632364483506
-c12,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c13,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c14,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c15,1007.11399,284.478215,132.47511,813.5032,122.583498,0.79176,2360.945773,3.0010273895
-c16,1007.11399,284.478215,132.47511,190.299356,594.8494,0.79176,2210.007831,3.20598995624147
-c17,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c18,1007.11399,1366.6231,653.4248,190.299356,122.583498,0.79176,3340.836504,2.12080507664361
-c19,1007.11399,284.478215,716.9894,686.8162,122.583498,0.79176,2818.773063,2.51359822883343
-c20,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c21,1007.11399,1366.6231,653.4248,686.8162,510.1917,0.79176,4224.96155,1.67700060188712
-c22,1007.11399,284.478215,132.47511,190.299356,594.6259,0.79176,2209.784331,3.20631421355145
-c23,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c24,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c25,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c26,1007.11399,284.478215,132.47511,190.299356,594.8287,0.79176,2209.987131,3.20601998537068
-c27,1007.11399,1366.6231,653.4248,190.299356,122.583498,0.79176,3340.836504,2.12080507664361
-c28,1007.11399,1366.6231,132.47511,190.299356,122.583498,0.79176,2819.886814,2.51260545053191
-c29,1007.11399,284.478215,132.47511,813.5072,122.583498,0.79176,2360.949773,3.00102230505933
-c30,1007.11399,284.478215,132.47511,190.299356,122.583498,0.79176,1737.741929,4.07728138685653
-c4,1737.741929
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0
-c2,0,101.577,30.7428,17.7729,0,0,150.0927,0
-c3,0,0,30.3046,17.8726,18.1884,0,66.3656,0
-c4,0,98.4261,30.5781,17.9593,18.3894,0,165.3529,0
-c5,0,102.312,30.6223,18.2865,18.0635,0,169.2843,0
-c6,0,103.328,30.8374,0,18.2469,0,152.4123,0
-c7,0,102.713,30.6892,18.2789,0,0,151.6811,0
-c8,0,102.804,30.2146,17.6904,18.0145,0,168.7235,0
-c9,0,0,0,18.007,17.9143,0,35.9213,0
-c10,0,0,30.9873,17.7668,18.1695,0,66.9236,0
-c11,0,101.253,30.7777,18.1746,0,0,150.2053,0
-c12,0,0,30.9036,17.9928,17.8342,0,66.7306,0
-c13,0,102.539,30.5572,18.2881,18.1205,0,169.5048,0
-c14,0,103.577,30.7469,18.0862,18.1303,0,170.5404,0
-c15,0,103.056,30.5244,0,18.187,0,151.7674,0
-c16,0,101.268,30.5238,17.9011,0,0,149.6929,0
-c17,0,103.891,30.4408,17.8737,17.8652,0,170.0707,0
-c18,0,0,0,17.8041,17.8634,0,35.6675,0
-c19,0,101.925,0,0,18.0362,0,119.9612,0
-c20,0,0,30.7311,17.8306,17.8473,0,66.409,0
-c21,0,0,0,0,0,0,0,0
-c22,0,101.492,30.0116,17.977,0,0,149.4806,0
-c23,0,0,30.6942,18.4031,18.2642,0,67.3615,0
-c24,0,102.604,30.7051,17.4343,17.9855,0,168.7289,0
-c25,0,102.404,30.8007,17.8214,18.0542,0,169.0803,0
-c26,0,103.762,30.5876,17.6232,0,0,151.9728,0
-c27,0,0,0,17.7149,18.191,0,35.9059,0
-c28,0,0,31.3332,18.1906,17.8624,0,67.3862,0
-c29,0,101.665,30.3239,0,17.9223,0,149.9112,0
-c30,0,103.28,29.8201,17.8587,17.9835,0,168.9423,0
-c0,0
-
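The Improvement column in these tables appears to be the c0 baseline total divided by each configuration's total (an inference from the numbers themselves; the files carry no documentation): in the Time table above, 7085.26323 / 4224.96155 gives c1's 1.67700060188712, and c0 divided by itself reads as ~1.0 up to floating-point noise. A minimal check:

#include <cstdio>

int main() {
    // Values taken from the Time table above (milliseconds).
    const double c0_total = 7085.26323;  // baseline configuration
    const double c1_total = 4224.96155;  // candidate configuration
    std::printf("improvement = %.10f\n", c0_total / c1_total);  // ~1.6770006019, matching the table
    return 0;
}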
diff --git a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_tensors.txt b/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_tensors.txt
deleted file mode 100644
index e7f0a6e270ba81fa11ec07b261192e5b50a4d93d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/alexnet_cifar10/alexnet_tensors.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-#Conv1,4
-Conv1,925.494,3271.78,759.668,1563.71,5.44249,6.60174,83.2352,403.353
-Add1,114.477,689.511,93.2842,353.575,309.757,1205.25,83.2903,330.731
-Tanh1,76.4029,470.583,71.349,309.397,100.913,396.573,83.2272,381.858
-Pool1,175.352,985.339,77.3703,318.571,210.001,844.263,20.9052,88.6981
-#Conv2,4
-Conv2,2080.05,13944.8,1196.88,6519.17,26.722,110.701,62.1589,473.091
-Add2,139.245,1083.98,57.2394,332.553,183.024,1140.1,62.1204,356.499
-Tanh2,55.9198,411.855,53.7113,299.392,76.8765,416.385,62.0292,344.851
-Pool2,132.287,891.752,58.7924,314.375,76.8368,405.93,15.6216,82.0579
-#Conv3,3
-Conv3,1014.71,6863.91,561.436,3134.15,20.4033,103.834,31.0982,213.521
-Add3,73.7075,582.264,64.5391,421.019,39.2865,257.541,31.1203,195.786
-Tanh3,27.9644,214.918,27.4497,170.841,39.0187,237.416,31.0592,188.341
-#Conv4,3
-Conv4,1226.86,9535.92,627.538,4183.57,39.4494,231.112,20.805,157.506
-Add4,47.9113,399.265,40.9697,274.189,75.4038,524.503,20.652,132.57
-Tanh4,18.6218,150.823,18.3085,118.735,26.1769,164.262,20.6587,128.708
-#Conv5,4
-Conv5,861.092,6591.9,430.284,2806.26,26.6141,161.682,20.7426,148.724
-Add5,47.955,388.123,41.049,260.675,79.1767,523.707,20.6328,125.982
-Tanh5,18.6201,147.003,18.3283,114.102,26.1523,156.042,20.633,123.091
-Pool3,44.6908,343.93,20.5304,123.325,26.4508,154.684,5.2822,29.1224
-#FC1,2
-Mul1,3.73053,25.3858,6.85001,40.5555,7.36824,40.3988,0.141077,0.0604596
-Add6,0.1721,0.083548,0.580578,3.19613,0.460408,1.00469,0.0886822,0.0103369
diff --git a/llvm/projects/soc_simulator/ddr_test.cpp b/llvm/projects/soc_simulator/ddr_test.cpp
deleted file mode 100644
index 53c0e3b6d94f6fd2e1a9ea2a177767b4c46c9b9c..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/ddr_test.cpp
+++ /dev/null
@@ -1,70 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <cstdlib>
-#include <cstring>
-#include <chrono>
-
-// Goes through a 16 MB array to clear out the cache
-char clearCache() {
-    static const unsigned num_bytes = 16 * 1024 * 1024;
-    static char *temp = (char *) std::malloc(num_bytes);
-    for (unsigned i = 0; i < num_bytes; i++)
-        temp[i] = rand();
-    return temp[rand() % num_bytes];
-}
-
-int main() {
-    srand(1);
-    std::chrono::time_point<std::chrono::high_resolution_clock> start;
-    std::chrono::time_point<std::chrono::high_resolution_clock> end;
-    std::ofstream ofs;
-    ofs.open("profile_data.txt");
-
-    // 1 MB (1 << 20) up to 32 MB (1 << 25)
-    for (unsigned i = 20; i < 26; i++) {
-        const unsigned num_bytes = 1 << i;
-        char *src = (char *) std::malloc(num_bytes);
-        char *dst = (char *) std::malloc(num_bytes);
-
-        // Test writes
-        clearCache();
-        start = std::chrono::high_resolution_clock::now();
-        for (unsigned j = 0; j < num_bytes; j++)
-            src[j] = rand();
-        end = std::chrono::high_resolution_clock::now();
-        std::cout << num_bytes << "w = " << (static_cast<double>(num_bytes) / 1e9) / std::chrono::duration<double>(end - start).count() << "\n";
-        ofs << num_bytes << "w\t" << std::to_string(std::chrono::duration<double>(start.time_since_epoch()).count()) << "\n";
-        ofs << num_bytes << "w\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-
-        // Test reads
-        clearCache();
-        char sum = 0; // initialized so the accumulation below is well-defined
-        start = std::chrono::high_resolution_clock::now();
-        for (unsigned j = 0; j < num_bytes; j++)
-            sum += src[j];
-        end = std::chrono::high_resolution_clock::now();
-        std::cout << num_bytes << "r = " << (static_cast<double>(num_bytes) / 1e9) / std::chrono::duration<double>(end - start).count() << "\n";
-        ofs << num_bytes << "r\t" << std::to_string(std::chrono::duration<double>(start.time_since_epoch()).count()) << "\n";
-        ofs << num_bytes << "r\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-
-        // Test read-then-write
-        clearCache();
-        start = std::chrono::high_resolution_clock::now();
-        std::memcpy(dst, src, num_bytes);
-        end = std::chrono::high_resolution_clock::now();
-        std::cout << num_bytes << "rw = " << (static_cast<double>(num_bytes) / 1e9) / std::chrono::duration<double>(end - start).count() << "\n";
-        ofs << num_bytes << "rw\t" << std::to_string(std::chrono::duration<double>(start.time_since_epoch()).count()) << "\n";
-        ofs << num_bytes << "rw\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-
-        // So the compiler doesn't optimize out everything
-        std::cout << num_bytes << ": " << sum << "\n";
-        std::cout << num_bytes << ": " << dst[rand() % num_bytes] << "\n";
-
-        free(src);
-        free(dst);
-    }
-    return 0;
-}
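ddr_test prints its GB/s figures to stdout and mirrors the raw timestamps into profile_data.txt as "<bytes><tag>\t<seconds>" start/end pairs. A minimal sketch, assuming exactly that two-lines-per-measurement layout, that recomputes the bandwidth offline:

#include <fstream>
#include <iostream>
#include <string>

int main() {
    std::ifstream ifs("profile_data.txt");
    std::string tag1, tag2;
    double start, end;
    // Each measurement is two consecutive lines sharing a tag: start, then end.
    while (ifs >> tag1 >> start >> tag2 >> end) {
        // tag1 looks like "1048576w"; strip the trailing w/r/rw suffix.
        std::string::size_type pos = tag1.find_first_not_of("0123456789");
        double bytes = std::stod(tag1.substr(0, pos));
        std::cout << tag1 << " = " << (bytes / 1e9) / (end - start) << " GB/s\n";
    }
    return 0;
}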
diff --git a/llvm/projects/soc_simulator/include/promise_timing_model.h b/llvm/projects/soc_simulator/include/promise_timing_model.h
deleted file mode 100644
index 6ff5592d19cca115c6afbc9cb606c731ee9f9fe8..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/include/promise_timing_model.h
+++ /dev/null
@@ -1,162 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <string>
-#include <boost/algorithm/string.hpp>
-
-#include <cmath>
-#include <cassert>
-#include <algorithm>
-#include <utility>
-#include <vector>
-
-// NOTE 1: This code uses seconds for all computations. Using clock cycles
-// would have been cleaner but it's not possible because we don't know anything
-// about Jetson's DRAM other than its bandwidth.
-
-// NOTE 2: All elements are assumed to be 1 byte long.
-
-// NOTE 3: PROMISE's frequency is fixed at 1 GHz and thus 1 cycle = 1 ns.
-
-#define NUM_ARGS (7)
-#define VOLTAGE_LEVELS (7)
-
-class Dram {
-private:
-    const double energy_per_bit = 20e-12; // 20 pJ/bit
-    double latency_;
-    double bandwidth_;
-
-public:
-    Dram(const double latency, const double bandwidth);
-
-    // Calculates (time, energy) of accessing 'num_bytes' in memory
-    std::pair<double, double> access(const unsigned num_bytes, const bool pipeline = false) const;
-};
-
-class Scratchpad {
-private:
-    // Line size, latency, and energy
-    const unsigned log_line_size = 6;
-    const unsigned line_size     = 1 << log_line_size; // 64 B
-    const double line_latency    = 1e-9;   // 1 ns
-    const double line_energy     = 12e-12; // 12 pJ
-
-    // Tag array. The tag is the address of the row being requested.
-    unsigned num_lines_;
-    std::vector<int> lines_;
-
-    // DRAM
-    Dram dram_;
-
-    // Enable flag
-    bool enable_;
-
-private:
-    // Calculates the index within the scratchpad array
-    unsigned getIndex(const unsigned address) const;
-
-public:
-    Scratchpad(const bool enable,
-               const unsigned size,
-               const double dram_latency,
-               const double dram_bandwidth);
-
-    // Clears the scratchpad
-    void clear();
-
-    // Calculates (time, energy) of accessing 'num_bytes' starting from 'address'
-    std::pair<double, double> access(const unsigned address,
-                                     const unsigned num_bytes);
-};
-
-class Promise {
-private:
-    // Compute energy in pJ per 128-element dot product, indexed by voltage
-    // swing 1 through 7 (entry 0 is a placeholder)
-    const double compute_energy_per_dot[VOLTAGE_LEVELS + 1] = {
-        0.0, // This makes indexing simpler
-        30.54403e-12,
-        31.68943e-12,
-        35.04211e-12,
-        47.21840426e-12,
-        52.68045671e-12,
-        80.03489e-12,
-        106.5494e-12
-    };
-
-    // SRAM access energy per byte
-    const double sram_energy_per_byte = 0.1875e-12; // 0.1875 pJ/B
-
-    // Leakage energy (converted from pJ/clock to mJ/s)
-    const double leakage_energy_per_s   = 6e-3;   // 6 pJ/ns ==> 6 mJ/s
-
-    const unsigned num_banks_ = 256;
-    const unsigned bank_x_ = 128;
-    const unsigned bank_y_ = 128;
-    const unsigned bank_size = bank_x_ * bank_y_;
-    const unsigned vector_size_ = bank_x_;
-
-    const double pipeline_latency_ = 14e-9; // 14 ns
-    const double reduction_latency_ = 10e-9; // 10 ns
-
-    const bool use_scratchpad_ = false;
-    const unsigned scratchpad_size_ = 512 * 1024; // 512 KB
-
-    const double dram_latency_ = 100e-9; // 100 ns
-    const double dram_bandwidth_ = 30e9;   // 30 GB/s (measured peak)
-
-    // Scratchpad for array A
-    Scratchpad scratch_;
-
-    // DRAM
-    Dram dram_;
-
-    // Unsigned version of std::min
-    unsigned min(const unsigned x, const unsigned y) const;
-
-    // Calculates energy of loading data into the SRAM
-    double loadSRAM(const unsigned num_bytes) const;
-
-    // Calculates (time, energy) of computing 'num_elements' elements
-    std::pair<double, double> compute(const unsigned num_elements, 
-                                      const unsigned voltage_swing) const;
-
-    // Calculates the number of banks required to fill up an entire column; i.e. all the rows
-    unsigned banksPerColumnTile(const unsigned num_rows) const;
-
-    // Calculates the number of fully filled column tiles
-    unsigned activeColumnTiles(const unsigned num_rows, const unsigned remaining_columns) const;
-
-    // Calculates the number of rows of A that can be operated on in parallel
-    // based on the tiling of *B*
-    unsigned numRowsA(const unsigned num_rows, const unsigned num_cols) const;
-
-    // Calculates (time, energy) of A x B (GEMM)
-    std::pair<double, double> run(const unsigned num_rows_a,
-                            const unsigned num_cols_a,
-                            const unsigned num_rows_b,
-                            const unsigned num_cols_b, 
-                            const unsigned voltage_swing, 
-                            const unsigned patch_factor);
-public:
-    Promise();
-
-    std::pair<double, double> fc_profile(const unsigned num_rows_a,
-                            const unsigned num_cols_a,
-                            const unsigned num_rows_b,
-                            const unsigned num_cols_b,
-                            const unsigned voltage_swing,
-                            const unsigned patch_factor);
-
-    std::pair<double, double> conv_profile(const unsigned n,
-                            const unsigned c,
-                            const unsigned h,
-                            const unsigned w,
-                            const unsigned c_out,
-                            const unsigned c_in,
-                            const unsigned k_h,
-                            const unsigned k_w,
-                            const unsigned s_h,
-                            const unsigned s_w,
-                            const unsigned voltage_swing,
-                            const unsigned patch_factor);
-};
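The matching promise_timing_model.cpp is not part of this diff, so the following is only a plausible sketch of what Dram::access computes, read off the constants and NOTEs above: transfer time is bytes over bandwidth, with the fixed latency added when the access is not pipelined, and energy is 20 pJ per bit moved (elements are 1 byte, per NOTE 2).

#include <utility>

// Hedged sketch of Dram::access, not the shipped implementation.
std::pair<double, double> dram_access_sketch(unsigned num_bytes,
                                             bool pipeline,
                                             double latency,      // seconds
                                             double bandwidth) {  // bytes/s
    const double energy_per_bit = 20e-12;             // 20 pJ/bit, as above
    double transfer = num_bytes / bandwidth;          // streaming time
    double time = pipeline ? transfer : latency + transfer;
    double energy = num_bytes * 8.0 * energy_per_bit; // 1-byte elements
    return {time, energy};
}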
diff --git a/llvm/projects/soc_simulator/mobilenet_cifar10/mobilenet_layers.txt b/llvm/projects/soc_simulator/mobilenet_cifar10/mobilenet_layers.txt
deleted file mode 100644
index ec202b5be38d401551b82746655d45847567307c..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_cifar10/mobilenet_layers.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-Conv1,2000,3,32,32,32,3,3,3,1,1
-NML1
-NML2
-NML3
-NML4
-NML5
-Conv3,2000,32,32,32,64,32,1,1,1,1
-NML6
-NML7
-NML8
-NML9
-NML10
-Conv5,2000,64,16,16,128,64,1,1,1,1
-NML11
-NML12
-NML13
-NML14
-NML15
-Conv7,2000,128,16,16,128,128,1,1,1,1
-NML16
-NML17
-NML18
-NML19
-NML20
-Conv9,2000,128,8,8,256,128,1,1,1,1
-NML21
-NML22
-NML23
-NML24
-NML25
-Conv11,2000,256,8,8,256,256,1,1,1,1
-NML26
-NML27
-NML28
-NML29
-NML30
-Conv13,2000,256,4,4,512,256,1,1,1,1
-NML31
-NML32
-NML33
-NML34
-NML35
-Conv15,2000,512,4,4,512,512,1,1,1,1
-NML36
-NML37
-NML38
-NML39
-NML40
-Conv17,2000,512,4,4,512,512,1,1,1,1
-NML41
-NML42
-NML43
-NML44
-NML45
-Conv19,2000,512,4,4,512,512,1,1,1,1
-NML46
-NML47
-NML48
-NML49
-NML50
-Conv21,2000,512,4,4,512,512,1,1,1,1
-NML51
-NML52
-NML53
-NML54
-NML55
-Conv23,2000,512,4,4,512,512,1,1,1,1
-NML56
-NML57
-NML58
-NML59
-NML60
-Conv25,2000,512,2,2,1024,512,1,1,1,1
-NML61
-NML62
-NML63
-NML64
-NML65
-Conv27,2000,1024,2,2,1024,1024,1,1,1,1
-NML66
-NML67
-NML68
-FC1,2000,1024,1024,10
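Although the file carries no header row, each ConvN line holds exactly the ten leading parameters of Promise::conv_profile from the header above (n, c, h, w, c_out, c_in, k_h, k_w, s_h, s_w), and the FC1 line holds the four matrix dimensions of Promise::fc_profile, while the NMLk lines carry no numeric fields. A minimal parse sketch under that (inferred) mapping:

#include <cstdio>

int main() {
    // Hypothetical example row, copied from the layer file above.
    const char *row = "Conv1,2000,3,32,32,32,3,3,3,1,1";
    char name[16];
    unsigned n, c, h, w, c_out, c_in, k_h, k_w, s_h, s_w;
    if (std::sscanf(row, "%15[^,],%u,%u,%u,%u,%u,%u,%u,%u,%u,%u",
                    name, &n, &c, &h, &w, &c_out, &c_in,
                    &k_h, &k_w, &s_h, &s_w) == 11)
        std::printf("%s: %ux%ux%ux%u input, %u filters of %ux%u, stride %ux%u\n",
                    name, n, c, h, w, c_out, k_h, k_w, s_h, s_w);
    return 0;
}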
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss1.txt b/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss1.txt
deleted file mode 100644
index 98ec2de71c8abc518c52ee3089721b0d96a06fdb..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss1.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss2.txt b/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss2.txt
deleted file mode 100644
index 9fd37f52c3a00843b7ffaca9b3d8ecbc5f1139ec..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HA_loss2.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,6,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,6,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,6,8,8,8,9 9
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,7,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,8,9 9
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss1.out b/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss1.out
deleted file mode 100644
index ddc3ead56dc7ccf7afe7b065617716f129141aa9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss1.out
+++ /dev/null
@@ -1,319 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11378.11813,1.18958976083693
-c2,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c3,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c4,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c5,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c6,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,10255.116092,1.31985759016157
-c7,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10349.083749,1.30787354141573
-c8,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,11142.045787,1.21479422013441
-c9,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11378.11813,1.18958976083693
-c10,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c11,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c12,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c13,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c14,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,11142.045787,1.21479422013441
-c15,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,10255.116092,1.31985759016157
-c16,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10349.083749,1.30787354141573
-c17,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11378.11813,1.18958976083693
-c18,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c19,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10019.043749,1.35095655313964
-c20,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c21,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,10255.116092,1.31985759016157
-c22,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,11142.045787,1.21479422013441
-c23,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10349.083749,1.30787354141573
-c24,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,10626.917401,1.27368006223125
-c2,10019.043749
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2803.076713,1.09050177070549
-c2,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c3,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c4,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c5,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c6,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2574.757034,1.18720332400873
-c7,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2526.995454,1.20964210766475
-c8,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2744.495133,1.11377866182652
-c9,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2803.076713,1.09050177070549
-c10,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c11,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c12,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c13,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c14,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2744.495133,1.11377866182652
-c15,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2574.757034,1.18720332400873
-c16,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2526.995454,1.20964210766475
-c17,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2803.076713,1.09050177070549
-c18,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c19,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2516.175454,1.21484378271644
-c20,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c21,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2574.757034,1.18720332400873
-c22,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2744.495133,1.11377866182652
-c23,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2526.995454,1.20964210766475
-c24,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2673.982838,1.14314874061474
-c2,2516.175454
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,13597.77833,0.995404728329582
-c2,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c3,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c4,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c5,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c6,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,12182.9454528,1.11100331897069
-c7,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12961.9392918,1.04423362390992
-c8,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,13380.295169,1.01158402500721
-c9,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,13597.77833,0.995404728329582
-c10,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c11,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c12,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c13,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c14,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,13380.295169,1.01158402500721
-c15,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,12182.9454528,1.11100331897069
-c16,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12961.9392918,1.04423362390992
-c17,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,13597.77833,0.995404728329582
-c18,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c19,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11965.4622918,1.1311968146986
-c20,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c21,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,12182.9454528,1.11100331897069
-c22,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,13380.295169,1.01158402500721
-c23,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12961.9392918,1.04423362390992
-c24,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,12528.1959148,1.0803864281825
-c2,11965.4622918
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c2,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c3,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c4,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c5,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c6,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c7,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,2549.1263318,0
-c8,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c9,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c10,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c11,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c12,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c13,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c14,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c15,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c16,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,2549.1263318,0
-c17,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c18,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c19,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c20,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c21,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c22,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2219.6602,0
-c23,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,2549.1263318,0
-c24,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c2,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c3,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c4,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c5,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c6,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c7,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,716.022858,0
-c8,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c9,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c10,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c11,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c12,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c13,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c14,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c15,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c16,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,716.022858,0
-c17,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c18,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c19,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c20,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c21,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c22,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,622.41277,0
-c23,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,716.022858,0
-c24,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,3425.489483,0.892357181049411
-c2,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c3,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c4,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c5,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c6,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,3110.564114,0.982702821000178
-c7,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3251.273779,0.940173095765213
-c8,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3369.285448,0.907242851474695
-c9,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,3425.489483,0.892357181049411
-c10,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c11,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c12,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c13,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c14,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3369.285448,0.907242851474695
-c15,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,3110.564114,0.982702821000178
-c16,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3251.273779,0.940173095765213
-c17,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,3425.489483,0.892357181049411
-c18,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c19,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3054.360079,1.00078577798928
-c20,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c21,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,3110.564114,0.982702821000178
-c22,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3369.285448,0.907242851474695
-c23,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3251.273779,0.940173095765213
-c24,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3206.289541,0.953363722638182
-c2,3054.360079
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss2.out b/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss2.out
deleted file mode 100644
index 25c65eae81141ec16294f4c133b32879d039277a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HA_results_loss2.out
+++ /dev/null
@@ -1,374 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,5.547644,7728.157869,1.75142550117301
-c2,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,11.801625,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,8111.070457,1.66874308993882
-c3,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,8618.819472,1.57043465534099
-c4,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,9619.502135,1.40706791404992
-c5,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,9243.791103,1.46425775385398
-c6,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,10497.867418,1.28933737454697
-c7,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,10646.660787,1.27131812383798
-c8,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,5.547644,8984.000194,1.50659978863086
-c9,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,9227.95744,1.4667701802191
-c10,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,9591.81545,1.41112940229548
-c11,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,5.547644,7728.157869,1.75142550117301
-c12,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,11.801625,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,8111.070457,1.66874308993882
-c13,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,8618.819472,1.57043465534099
-c14,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,9619.502135,1.40706791404992
-c15,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,9243.791103,1.46425775385398
-c16,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,10646.660787,1.27131812383798
-c17,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,9591.81545,1.41112940229548
-c18,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,5.547644,8984.000194,1.50659978863086
-c19,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,9227.95744,1.4667701802191
-c20,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,15.711348,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,954.913,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,9238.670838,1.46506927617991
-c21,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,5.547644,7728.157869,1.75142550117301
-c22,572.966,129.571,87.353,510.748,185.039,114.322,31.422697,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,11.801625,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,8111.070457,1.66874308993882
-c23,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,8618.819472,1.57043465534099
-c24,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,9619.502135,1.40706791404992
-c25,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,25.29103,9243.791103,1.46425775385398
-c26,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,25.29103,10646.660787,1.27131812383798
-c27,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,5.547644,8984.000194,1.50659978863086
-c28,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,15.711348,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,5.547644,9227.95744,1.4667701802191
-c29,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,7.768049,151.781,104.976,504.488,63.3415,35.9187,372.978,109.567,57.0866,785.077,108.787,56.7435,623.585,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,5.547644,9591.81545,1.41112940229548
-c1,7728.157869
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,1922.094426,1.59032773188388
-c2,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2019.082613,1.51393511930881
-c3,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2149.756178,1.42191012966541
-c4,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2404.551749,1.27123905823501
-c5,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2305.325945,1.32595570792677
-c6,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2639.079526,1.1582675254983
-c7,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2677.486133,1.14165301405679
-c8,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2243.24107,1.36265340921859
-c9,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2301.82265,1.32797376687671
-c10,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2401.048454,1.27309388346505
-c11,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,1922.094426,1.59032773188388
-c12,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2019.082613,1.51393511930881
-c13,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2149.756178,1.42191012966541
-c14,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2404.551749,1.27123905823501
-c15,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2305.325945,1.32595570792677
-c16,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2677.486133,1.14165301405679
-c17,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2401.048454,1.27309388346505
-c18,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2243.24107,1.36265340921859
-c19,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2301.82265,1.32797376687671
-c20,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,8.303616,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,198.587,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2270.151198,1.34650066305845
-c21,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,1922.094426,1.59032773188388
-c22,166.211,44.0763,27.7776,138.263,46.5303,27.6531,16.261248,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2019.082613,1.51393511930881
-c23,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2149.756178,1.42191012966541
-c24,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2404.551749,1.27123905823501
-c25,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2305.325945,1.32595570792677
-c26,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2677.486133,1.14165301405679
-c27,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2243.24107,1.36265340921859
-c28,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,2.837158,2301.82265,1.32797376687671
-c29,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,8.303616,40.4994,26.741,103.885,15.2944,7.99054,97.6367,27.6042,13.5937,162.139,27.6361,13.5283,166.111,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2401.048454,1.27309388346505
-c1,1922.094426
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,5.6044758,9902.2150608,1.36689545967273
-c2,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,88.025151,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,10254.831911,1.31989416593867
-c3,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,10671.7004468,1.26833515283173
-c4,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,11596.947343,1.16714273394159
-c5,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11247.787158,1.20337383998555
-c6,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,12410.7956558,1.09060637289712
-c7,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,12547.882469,1.07869139431058
-c8,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,5.6044758,11006.6422628,1.22973859764412
-c9,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,11232.0103268,1.2050641363103
-c10,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11569.3174898,1.16993010511988
-c11,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,5.6044758,9902.2150608,1.36689545967273
-c12,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,88.025151,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,10254.831911,1.31989416593867
-c13,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,10671.7004468,1.26833515283173
-c14,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,11596.947343,1.16714273394159
-c15,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11247.787158,1.20337383998555
-c16,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,12547.882469,1.07869139431058
-c17,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11569.3174898,1.16993010511988
-c18,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,5.6044758,11006.6422628,1.22973859764412
-c19,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,11232.0103268,1.2050641363103
-c20,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,76.301921,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,1021.9148,364.726,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,11670.4277888,1.1597940600781
-c21,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,5.6044758,9902.2150608,1.36689545967273
-c22,572.966,281.424,87.353,651.192,404.181,114.322,152.598914,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,88.025151,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,10254.831911,1.31989416593867
-c23,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,10671.7004468,1.26833515283173
-c24,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,11596.947343,1.16714273394159
-c25,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,25.29103,11247.787158,1.20337383998555
-c26,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,25.29103,12547.882469,1.07869139431058
-c27,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,5.6044758,11006.6422628,1.22973859764412
-c28,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,91.934874,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,5.6044758,11232.0103268,1.2050641363103
-c29,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,83.991575,151.781,104.976,536.2213,121.4188,35.9187,372.978,109.567,57.0866,852.5958,379.656,56.7435,623.585,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,5.6044758,11569.3174898,1.16993010511988
-c1,9902.2150608
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,122.707169,0
-c2,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,110.217081,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c11,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,122.707169,0
-c12,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,110.217081,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c20,0,0,0,0,0,0,50.323486,0,0,0,0,0,25.140123,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,96.80811,0
-c21,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,122.707169,0
-c22,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,110.217081,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,168.660691,0
-c2,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,150.911873,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c11,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,168.660691,0
-c12,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,150.911873,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c20,0,0,0,0,0,0,70.852731,0,0,0,0,0,35.450450,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,130.098709,0
-c21,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,168.660691,0
-c22,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,150.911873,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,39.762784,0
-c2,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,35.604854,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.15793,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c11,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,39.762784,0
-c12,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,35.604854,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c20,0,0,0,0,0,0,16.574126,0,0,0,0,0,8.307830,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,30.759878,0
-c21,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,39.762784,0
-c22,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,35.604854,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c2,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c3,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c4,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c5,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c6,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c7,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c8,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c9,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c10,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c11,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c12,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c13,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c14,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c15,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c16,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c17,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c18,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c19,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c20,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,67.0018,255.159,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,2204.8501318,0
-c21,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c22,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c23,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c24,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c25,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c26,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c27,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c28,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c29,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,1882.6893318,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c2,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c3,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c4,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c5,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c6,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c7,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c8,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c9,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c10,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c11,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c12,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c13,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c14,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c15,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c16,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c17,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c18,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c19,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c20,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,18.9755,69.3636,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,618.268258,0
-c21,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c22,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c23,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c24,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c25,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c26,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c27,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c28,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c29,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,529.929158,0
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2491.786368,1.22673442016622
-c2,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2584.313037,1.18281340764627
-c3,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2702.873994,1.13092956671033
-c4,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2947.330125,1.03712851789453
-c5,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2851.604698,1.07194385075516
-c6,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,3173.166614,0.963315357656308
-c7,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3209.489248,0.95241326471603
-c8,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2792.200956,1.09474932739228
-c9,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,2848.404991,1.07314800049274
-c10,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2944.130418,1.03825567831025
-c11,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2491.786368,1.22673442016622
-c12,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2584.313037,1.18281340764627
-c13,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2702.873994,1.13092956671033
-c14,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2947.330125,1.03712851789453
-c15,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2851.604698,1.07194385075516
-c16,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3209.489248,0.95241326471603
-c17,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2944.130418,1.03825567831025
-c18,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2792.200956,1.09474932739228
-c19,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,2848.404991,1.07314800049274
-c20,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,16.611446,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,217.5625,96.9678,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,2919.179334,1.04712995453366
-c21,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2491.786368,1.22673442016622
-c22,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,32.835374,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2584.313037,1.18281340764627
-c23,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2702.873994,1.13092956671033
-c24,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,2947.330125,1.03712851789453
-c25,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,6.340453,2851.604698,1.07194385075516
-c26,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3209.489248,0.95241326471603
-c27,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2792.200956,1.09474932739228
-c28,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,3.140746,2848.404991,1.07314800049274
-c29,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,19.078877,40.4994,26.741,113.63096,32.2143,7.99054,97.6367,27.6042,13.5937,181.236,101.1854,13.5283,166.111,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,2944.130418,1.03825567831025
-c1,2491.786368
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss1.txt b/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss1.txt
deleted file mode 100644
index 4d905b811accaa167503d0c189e3bb475c3ad9e6..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss1.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,3
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,9,8,8,4,8,8,8,3
-9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,3,8,8,9,8,8,8,8,8,8,3
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,6,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,7,8,8,9,8,8,3,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,4,8,8,9,8,8,6,8,8,8,3
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,4,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,7,8,8,9,8,8,5,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,6,8,8,8,3
-9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,7,8,8,9,8,8,5,8,8,8,3
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,6,8,8,9,8,8,6,8,8,8,3
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,3,8,8,9,8,8,5,8,8,8,3
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,7
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,7
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,3,8,8,9,8,8,5,8,8,8,4
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,7
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,6,8,8,9,8,8,6,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,4,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,4,8,8,9,8,8,6,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,9,8,8,3,8,8,8,7
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,3
-9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,9 9
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,3,8,8,9,8,8,9,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,3,8,8,9,8,8,6,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,5,8,8,8,4
-9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,9 9
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,7,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,9,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,5,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,4,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,4,8,8,9,8,8,7,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,6,8,8,9,8,8,6,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,9,8,8,9,8,8,6,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,4
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,5
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,9,8,8,4,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,8 8
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,5
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,4,8,8,8,9 9
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss2.txt b/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss2.txt
deleted file mode 100644
index b8677344889c875db8f5485832c7c2dce383cff7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HS_loss2.txt
+++ /dev/null
@@ -1,76 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,5,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,6,8,8,8,7
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,4,8,8,8,7
-9,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,3,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,6,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,8,8,8,8,5
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,7,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,3,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,7,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,6,8,8,8,8 8
-9,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,4,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,7,8,8,9,8,8,5,8,8,8,6
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,5,8,8,8,6
-8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,8,7
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,6,8,8,9,8,8,7,8,8,9,8,8,3,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,7,8,8,8,5
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,3,8,8,8,4
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,7,8,8,8,3
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,5
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,8,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,9,8,8,8,9 9
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,7,8,8,8,9 9
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,5,8,8,8,5
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,4,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,8 8
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,8 8
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,4,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,8,8,8,8,6
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,6,8,8,9,8,8,7,8,8,8,9 9
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,9 9
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,3,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,3,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,9 9
-9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,5,8,8,8,4
-8,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,8,8,8,8,5
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,6
-9,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,8,8,8,9,8,8,3,8,8,8,9 9
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,4,8,8,8,6
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,4
-8,8,8,9,8,8,9,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,8,7
-8,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,9,8,8,9,8,8,4,8,8,8,7
-8,8,8,9,8,8,7,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,9,8,8,9,8,8,9,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,9 9
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,3,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,5,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,6,8,8,9,8,8,3,8,8,9,8,8,6,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,4
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,6,8,8,9,8,8,7,8,8,9,8,8,3,8,8,8,3
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,4,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,6
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,7
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,5
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,4
-8,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,7,8,8,9,8,8,3,8,8,9,8,8,3,8,8,8,6
-9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,9,8,8,9,8,8,5,8,8,9,8,8,6,8,8,9,8,8,3,8,8,8,6
-9,8,8,9,8,8,9,8,8,9,8,8,8,8,8,9,8,8,8,8,8,9,8,8,7,8,8,9,8,8,4,8,8,9,8,8,3,8,8,8,6
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/HS_results_loss1.out b/llvm/projects/soc_simulator/mobilenet_shallow/HS_results_loss1.out
deleted file mode 100644
index 4b0b840cee7d4daa6bac3bf282a7ff61dcde2cb1..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/HS_results_loss1.out
+++ /dev/null
@@ -1,748 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.012918,9753.967549,1.38767047739672
-c2,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.012918,10020.667336,1.35073766597357
-c3,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9752.849292,1.38782958702332
-c4,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,0.012918,10014.703506,1.3515420402348
-c5,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,9509.663009,1.4233199208908
-c6,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.017407,9905.842548,1.36639490702316
-c7,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,9385.492525,1.44215050661765
-c8,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9633.30488,1.40505184587232
-c9,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9562.861545,1.41540194206166
-c10,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.012918,9719.843162,1.39254230538023
-c11,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9563.759279,1.41526908066306
-c12,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,9691.52802,1.3966108106386
-c13,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.012918,9785.200162,1.38324128087221
-c14,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,10087.519853,1.34178598972433
-c15,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,9505.753286,1.42390533336733
-c16,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.012918,9748.634689,1.38842958393238
-c17,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9758.83773,1.38697795575521
-c18,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9634.853001,1.40482608319116
-c19,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9638.143868,1.40434641658592
-c20,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9501.995634,1.42446842988658
-c21,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.017407,9748.639178,1.38842894459593
-c22,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9689.509081,1.39690181320444
-c23,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9572.786868,1.4139344152592
-c24,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9387.909868,1.44177915959324
-c25,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.039278,9569.495203,1.41442077303249
-c26,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.029504,10078.782441,1.34294919936402
-c27,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.029504,9901.947872,1.36693234323934
-c28,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9573.405724,1.41384301395307
-c29,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9570.77457,1.41423170126729
-c30,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9569.496001,1.41442065508398
-c31,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9564.657013,1.41513624420506
-c32,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,9504.231525,1.42413332061444
-c33,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9572.791357,1.41393375221837
-c34,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.039278,9567.88112,1.4146593830729
-c35,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9640.09873,1.4040616369905
-c36,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,243.935,192.276,31.8826,63.4788,0.012918,9931.976455,1.36279952626207
-c37,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,5.547644,9902.901,1.36680077962204
-c38,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,577.914,192.276,31.8826,63.4788,0.039278,10285.771359,1.31592394386293
-c39,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9691.530357,1.39661047386213
-c40,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.039278,9568.600417,1.41455303938675
-c41,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.039278,9901.954696,1.36693140120855
-c42,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9896.218772,1.36772368508302
-c43,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.017407,9715.473295,1.39316864898893
-c44,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,5.547644,9572.861,1.413923465786
-c45,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,15.711348,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9642.05808,1.40377631946627
-c46,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.039278,9571.914696,1.41406324987935
-c47,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,577.914,192.276,31.8826,63.4788,0.012918,10409.101668,1.3003324634226
-c48,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9565.462425,1.4150170897251
-c49,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9567.97424,1.41464561494633
-c50,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,6.962637,108.421,56.8779,256.743,104.322,18.9772,7.862657,192.276,31.8826,63.4788,0.039278,9569.035257,1.41448875869171
-c51,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.039278,9571.917646,1.41406281407467
-c52,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,2.583585,109.567,57.0866,785.077,108.787,56.7435,1118.97,108.421,56.8779,256.743,104.322,18.9772,5.906058,192.276,31.8826,63.4788,0.012918,10494.182661,1.28979009154499
-c53,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9570.371864,1.41429120988343
-c54,572.966,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,7.855674,109.567,57.0866,785.077,108.787,56.7435,5.167169,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9752.229634,1.38791776990351
-c55,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,0.019420,9901.934838,1.36693414254385
-c56,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9691.525868,1.39661112075555
-c57,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.012918,9568.753292,1.41453043980799
-c58,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,7.768049,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.017407,9568.757781,1.41452977620806
-c59,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,2.585881,192.276,31.8826,63.4788,25.29103,9927.206448,1.3634543492728
-c60,572.966,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.884025,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.887477,192.276,31.8826,63.4788,0.017407,9636.534634,1.40458093262968
-c61,507.609,129.571,87.353,510.748,185.039,114.322,928.071,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.039278,9570.393735,1.41428797783639
-c62,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,709.5,155.327,109.967,873.476,157.738,111.071,1372.91,151.781,104.976,504.488,63.3415,35.9187,5.900812,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,0.019420,10021.57237,1.35061568277019
-c63,507.609,129.571,87.353,510.748,185.039,114.322,1046.81,331.516,220.807,405.855,92.5426,67.4597,524.623,155.327,109.967,873.476,157.738,111.071,1042.87,151.781,104.976,504.488,63.3415,35.9187,3.481319,109.567,57.0866,785.077,108.787,56.7435,11.801625,108.421,56.8779,256.743,104.322,18.9772,3.484413,192.276,31.8826,63.4788,5.547644,9509.764101,1.42330479052012
-c7,9385.492525
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c2,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2474.353444,1.23537731114143
-c3,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c4,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,0.036040,2419.263024,1.26350879227472
-c5,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2393.505444,1.2771059735636
-c6,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2456.341444,1.24443615565865
-c7,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2375.493444,1.286789532946
-c8,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c9,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c10,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2386.313444,1.28095498417873
-c11,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c12,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2463.533444,1.24080316886483
-c13,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2360.681444,1.294863441352
-c14,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2448.721444,1.24830862679746
-c15,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2393.505444,1.2771059735636
-c16,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c17,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c18,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c19,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c20,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2393.505444,1.2771059735636
-c21,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c22,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2463.533444,1.24080316886483
-c23,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c24,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2375.493444,1.286789532946
-c25,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c26,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2448.721444,1.24830862679746
-c27,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2456.341444,1.24443615565865
-c28,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c29,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c30,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c31,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c32,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2393.505444,1.2771059735636
-c33,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c34,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c35,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c36,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,63.1947,49.3667,6.98942,15.6197,0.036040,2522.115024,1.21198283096295
-c37,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2381.494562,1.28354695762068
-c38,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,119.521,49.3667,6.98942,15.6197,0.036040,2501.221324,1.2221070068685
-c39,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2463.533444,1.24080316886483
-c40,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c41,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2456.341444,1.24443615565865
-c42,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2456.341444,1.24443615565865
-c43,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2386.313444,1.28095498417873
-c44,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2370.674562,1.28940519633394
-c45,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c46,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c47,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,119.521,49.3667,6.98942,15.6197,0.036040,2519.233324,1.2133691935329
-c48,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c49,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c50,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c51,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c52,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,233.12,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2600.309828,1.17553688315572
-c53,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c54,166.211,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2437.901444,1.25384892409749
-c55,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2456.341444,1.24443615565865
-c56,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2463.533444,1.24080316886483
-c57,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c58,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c59,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,6.340453,2462.645857,1.24125037921559
-c60,166.211,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2419.889444,1.26318171652879
-c61,191.843,44.0763,27.7776,138.263,46.5303,27.6531,243.923,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2445.521444,1.24994205653173
-c62,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,186.198,40.5929,27.7158,197.299,40.5301,27.3118,292.058,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,0.036040,2474.353444,1.23537731114143
-c63,191.843,44.0763,27.7776,138.263,46.5303,27.6531,261.935,86.2346,54.8624,92.893,22.5995,15.858,116.17,40.5929,27.7158,197.299,40.5301,27.3118,281.238,40.4994,26.741,103.885,15.2944,7.99054,4.151808,27.6042,13.5937,162.139,27.6361,13.5283,8.303616,27.6455,13.524,53.1921,26.0905,3.6779,4.613120,49.3667,6.98942,15.6197,2.837158,2396.306562,1.27561312434393
-c13,2360.681444
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.016415,12956.814663,1.04464663511683
-c2,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.016415,13754.07915,0.984092987831228
-c3,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,12955.696406,1.04473680266681
-c4,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,1.016415,13299.219438,1.01775092179849
-c5,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,13235.701823,1.02263506860028
-c6,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.020904,12414.011362,1.0903238639204
-c7,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,11886.288339,1.13873165820118
-c8,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11610.908994,1.16573929176609
-c9,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11404.593359,1.18682818398218
-c10,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.016415,12887.075976,1.05029976265968
-c11,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11405.491093,1.1867347679254
-c12,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,12758.502834,1.06088410325399
-c13,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.016415,13088.305276,1.03415167626052
-c14,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,13956.803967,0.969798879386962
-c15,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,13231.7921,1.0229372362725
-c16,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.016415,12951.481803,1.04507677541245
-c17,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,12961.684844,1.04425412301551
-c18,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11612.457115,1.16558388060334
-c19,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11615.747982,1.16525365808979
-c20,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,13228.034448,1.02322781928676
-c21,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.020904,12951.486292,1.04507641318765
-c22,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,12756.483895,1.06105200690921
-c23,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11414.518682,1.18579619539847
-c24,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11888.705682,1.1385001187003
-c25,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.042775,11411.227017,1.18613824834278
-c26,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.033001,13948.066555,0.970406385256839
-c27,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.033001,12410.116686,1.09066604105364
-c28,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11415.137538,1.1857319090873
-c29,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,11412.506384,1.1860052796444
-c30,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11411.227815,1.1861381653948
-c31,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11406.388827,1.18664136657314
-c32,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,13230.270339,1.02305489569592
-c33,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,11414.523171,1.18579572905932
-c34,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.042775,11409.612934,1.18630604768677
-c35,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11617.702844,1.16505758575884
-c36,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,243.935,192.276,31.8826,63.4788,1.016415,12980.362087,1.04275156185979
-c37,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,5.6044758,14430.3024488,0.937977072776313
-c38,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,609.1642,319.769,31.8826,63.4788,1.042775,13593.158191,0.995743053544936
-c39,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,12758.505171,1.0608839089298
-c40,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.042775,11410.332231,1.18623126402961
-c41,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.042775,12410.12351,1.09066544132512
-c42,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,12404.387586,1.09116977690696
-c43,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.020904,12882.706109,1.05065602866454
-c44,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,5.6044758,13433.8254488,1.0075531273524
-c45,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,60.851377,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,11619.662194,1.16486112948301
-c46,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.042775,11413.64651,1.18588680782714
-c47,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,609.1642,319.769,31.8826,63.4788,1.016415,14941.7315,0.90587177620029
-c48,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11407.194239,1.18655758302673
-c49,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11409.706054,1.1862963656829
-c50,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,52.102666,108.421,56.8779,270.3063,138.7565,18.9772,26.451839,192.276,31.8826,63.4788,1.042775,11410.767071,1.18618605928613
-c51,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.042775,11413.64946,1.18588650131992
-c52,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,32.822491,109.567,57.0866,852.5958,379.656,56.7435,1185.8437,378.575,56.8779,270.3063,138.7565,18.9772,24.49524,192.276,31.8826,63.4788,1.016415,13286.866146,1.01869716255139
-c53,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,11412.103678,1.18604713094995
-c54,572.966,281.424,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,38.09458,109.567,57.0866,852.5958,379.656,56.7435,50.307198,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,12955.076748,1.04478677377275
-c55,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,1.022917,12410.103652,1.09066718655101
-c56,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,12758.500682,1.06088428219528
-c57,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.016415,11410.485106,1.18621537118182
-c58,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,52.908078,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.020904,11410.489595,1.18621490451291
-c59,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,21.175063,192.276,31.8826,63.4788,25.29103,12434.371765,1.08853853583862
-c60,572.966,281.424,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,34.122931,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.476659,192.276,31.8826,63.4788,1.020904,11614.138748,1.16541511352181
-c61,523.5897,129.571,87.353,651.192,404.181,114.322,928.071,331.516,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.042775,11412.125549,1.18604485792584
-c62,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,709.5,155.327,109.967,1015.027,717.543,111.071,1507.228,683.9,104.976,536.2213,121.4188,35.9187,36.139718,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,1.022917,13754.984184,0.984028237658145
-c63,523.5897,129.571,87.353,651.192,404.181,114.322,1336.459,1267.11,220.807,475.2483,216.7906,67.4597,662.986,676.028,109.967,1015.027,717.543,111.071,1042.87,151.781,104.976,536.2213,121.4188,35.9187,33.720225,109.567,57.0866,852.5958,379.656,56.7435,56.941654,108.421,56.8779,270.3063,138.7565,18.9772,22.073595,192.276,31.8826,63.4788,5.6044758,13234.8562498,1.02270040461788
-c9,11404.593359
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c2,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3638.4402,0
-c3,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c4,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3208.1335,0
-c5,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3631.0672,0
-c6,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c7,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2405.8242,0
-c8,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c9,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c10,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3072.2612,0
-c11,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c12,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2972.0032,0
-c13,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3208.1335,0
-c14,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3774.3125,0
-c15,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3631.0672,0
-c16,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c17,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c18,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c19,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c20,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3631.0672,0
-c21,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c22,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2972.0032,0
-c23,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c24,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2405.8242,0
-c25,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c26,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3774.3125,0
-c27,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c28,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c29,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c30,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c31,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c32,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3631.0672,0
-c33,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c34,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c35,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c36,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2972.0032,0
-c37,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,4433.4333318,0
-c38,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,31.2502,127.493,0,0,0,3231.0044,0
-c39,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2972.0032,0
-c40,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c41,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c42,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c43,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3072.2612,0
-c44,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,3766.9963318,0
-c45,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c46,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c47,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,31.2502,127.493,0,0,0,4456.2474,0
-c48,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c49,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c50,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c51,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c52,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,66.8737,270.154,0,13.5633,34.4345,0,0,0,0,0,0,2742.8519,0
-c53,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c54,0,151.853,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3107.8755,0
-c55,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c56,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2972.0032,0
-c57,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c58,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c59,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,2413.1972,0
-c60,0,151.853,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1882.6325,0
-c61,15.9807,0,0,140.444,219.142,0,0,0,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,1746.7602,0
-c62,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,0,0,0,141.551,559.805,0,134.318,532.119,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0,3638.4402,0
-c63,15.9807,0,0,140.444,219.142,0,289.649,935.594,0,69.3933,124.248,0,138.363,520.701,0,141.551,559.805,0,0,0,0,31.7333,58.0773,0,0,0,0,67.5188,270.869,0,0,0,0,13.5633,34.4345,0,0,0,0,0,0.0568318,3631.1240318,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c2,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,993.4163,0
-c3,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c4,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,895.65197,0
-c5,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,987.2553,0
-c6,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c7,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,660.5833,0
-c8,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c9,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c10,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,846.677,0
-c11,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c12,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,807.3226,0
-c13,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,895.65197,0
-c14,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,1042.39127,0
-c15,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,987.2553,0
-c16,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c17,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c18,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c19,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c20,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,987.2553,0
-c21,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c22,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,807.3226,0
-c23,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c24,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,660.5833,0
-c25,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c26,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,1042.39127,0
-c27,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c28,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c29,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c30,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c31,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c32,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,987.2553,0
-c33,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c34,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c35,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c36,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,807.3226,0
-c37,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,1222.627558,0
-c38,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,9.58326,35.0054,0,0,0,891.26566,0
-c39,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,807.3226,0
-c40,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c41,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c42,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c43,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,846.677,0
-c44,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,1036.533858,0
-c45,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c46,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c47,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,9.58326,35.0054,0,0,0,1217.93766,0
-c48,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c49,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c50,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c51,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c52,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,18.9605,73.8267,0,4.99161,10.5834,0,0,0,0,0,0,753.3705,0
-c53,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c54,0,56.5113,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,856.29757,0
-c55,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c56,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,807.3226,0
-c57,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c58,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c59,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,666.7443,0
-c60,0,56.5113,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,529.62557,0
-c61,7.53633,0,0,38.0284,59.1021,0,0,0,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,480.6506,0
-c62,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,0,0,0,37.842,149.921,0,37.6677,148.426,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0,993.4163,0
-c63,7.53633,0,0,38.0284,59.1021,0,75.228,251.444,0,19.2796,34.054,0,37.9227,142.01,0,37.842,149.921,0,0,0,0,9.74596,16.9199,0,0,0,0,19.097,73.5493,0,0,0,0,4.99161,10.5834,0,0,0,0,0,0.303588,987.558888,0
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c2,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3480.343124,0.87829275196796
-c3,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c4,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,0.196023,3325.110829,0.9192957147205
-c5,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3393.334124,0.900813190277717
-c6,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3135.659124,0.974838147144271
-c7,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3048.650124,1.0026601949729
-c8,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c9,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c10,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3245.563824,0.941827152254235
-c11,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c12,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3283.429424,0.930965688666933
-c13,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3268.906794,0.935101649303812
-c14,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3503.686094,0.872441211554462
-c15,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3393.334124,0.900813190277717
-c16,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c17,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c18,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c19,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c20,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3393.334124,0.900813190277717
-c21,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c22,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3283.429424,0.930965688666933
-c23,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c24,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3048.650124,1.0026601949729
-c25,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c26,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3503.686094,0.872441211554462
-c27,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3135.659124,0.974838147144271
-c28,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c29,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c30,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c31,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c32,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3393.334124,0.900813190277717
-c33,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c34,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c35,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c36,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,63.1947,49.3667,6.98942,15.6197,0.196023,3339.633459,0.915298093038477
-c37,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3616.535517,0.845217786223727
-c38,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,129.10426,84.3721,6.98942,15.6197,0.196023,3402.682819,0.898338252715695
-c39,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3283.429424,0.930965688666933
-c40,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c41,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3135.659124,0.974838147144271
-c42,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3135.659124,0.974838147144271
-c43,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3245.563824,0.941827152254235
-c44,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3419.621817,0.893888360231843
-c45,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c46,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c47,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,129.10426,84.3721,6.98942,15.6197,0.196023,3747.366819,0.815708814768453
-c48,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c49,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c50,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c51,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c52,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,252.0805,101.4722,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3360.375786,0.909648304743251
-c53,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c54,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3306.772394,0.924393871530734
-c55,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3135.659124,0.974838147144271
-c56,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3283.429424,0.930965688666933
-c57,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c58,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c59,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,6.340453,3141.803554,0.972931654754515
-c60,166.211,100.5876,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2962.088394,1.03196114302181
-c61,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,243.923,86.2346,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,2938.745424,1.0401581909819
-c62,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,186.198,40.5929,27.7158,235.141,190.4511,27.3118,329.7257,188.9254,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,0.196023,3480.343124,0.87829275196796
-c63,199.37933,44.0763,27.7776,176.2914,105.6324,27.6531,337.163,337.6786,54.8624,112.1726,56.6535,15.858,154.0927,182.6029,27.7158,235.141,190.4511,27.3118,281.238,40.4994,26.741,113.63096,32.2143,7.99054,8.309738,27.6042,13.5937,181.236,101.1854,13.5283,14.181538,27.6455,13.524,58.18371,36.6739,3.6779,6.990665,49.3667,6.98942,15.6197,3.140746,3396.278847,0.900032145681113
-c9,2938.745424
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss1.txt b/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss1.txt
deleted file mode 100644
index 6e5e15bc0bd2bc835293154ef3c5911662a51062..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss1.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss2.txt b/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss2.txt
deleted file mode 100644
index 36bb418742d308ee60d0d997a3a436ca8410870a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HA_loss2.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,9 9
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss1.txt b/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss1.txt
deleted file mode 100644
index 474eb6a9cc983edec9a18cc27a2ed3875d992bc7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss1.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,8,8,8,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8,8,5,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8,8,5,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,3,8,8,8,8,8,5,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,3,8,8,8,8,8,5,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8,8,8,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,5,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,5,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8,8,4,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,4,8,8,8,8 8
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss2.txt b/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss2.txt
deleted file mode 100644
index 574b3850329006a5bd6a1d0b4cb56e72521363ff..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/confs/HS_loss2.txt
+++ /dev/null
@@ -1,75 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,7,8,8,8,8,8,5,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,5,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,5,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,8 8
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,3,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,8,8,6,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,6,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,3
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,4,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,7
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,5
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,4
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,3,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,5,8,8,8,8,8,6,8,8,8,8,8,3,8,8,8,6
-8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,7,8,8,8,8,8,4,8,8,8,8,8,3,8,8,8,6
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_layers.txt b/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_layers.txt
deleted file mode 100644
index 4ab1093cab28b2f3dfa284a4669c6a2885ff667d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_layers.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-Conv1,4500,3,32,32,32,3,3,3,1,1
-NML1
-NML2
-NML3
-NML4
-NML5
-Conv3,4500,32,32,32,64,32,1,1,1,1
-NML6
-NML7
-NML8
-NML9
-NML10
-Conv5,4500,64,16,16,128,64,1,1,1,1
-NML11
-NML12
-NML13
-NML14
-NML15
-Conv7,4500,128,16,16,128,128,1,1,1,1
-NML16
-NML17
-NML18
-NML19
-NML20
-Conv9,4500,128,8,8,256,128,1,1,1,1
-NML21
-NML22
-NML23
-NML24
-NML25
-Conv11,4500,256,8,8,256,256,1,1,1,1
-NML26
-NML27
-NML28
-NML29
-NML30
-Conv13,4500,256,4,4,512,256,1,1,1,1
-NML31
-NML32
-NML33
-FC1,4500,1024,1024,10
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_tensors.txt b/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_tensors.txt
deleted file mode 100644
index 76a5b9f8f5db9a4e924d492d32bf213977b9fe84..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/info/mobilenet_shallow_tensors.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-#Conv1,1
-Conv1,165.401,567.689,190.845,489.961,7.45501,15.3385,38.3605,99.4665
-#NML1,1
-BatchNorm1,52.2864,184.046,44.208,126.823,57.7603,152.189,37.5847,103.341
-#NML2,1
-Relu1,38.6453,138.524,27.6261,85.2148,53.911,151.346,37.5139,107.477
-#NML3,1
-Conv2,137.297,505.587,146.888,454.401,57.2503,165.991,37.8788,118.31
-#NML4,1
-BatchNorm2,52.1549,197.914,46.4921,157.667,58.2651,183.446,38.5037,124.791
-#NML5,1
-Relu2,38.1169,146.069,27.6025,98.9958,53.3869,174.312,37.5912,124.95
-#Conv3,1
-Conv3,258.954,1032.03,243.635,792.454,56.4655,189.556,75.237,260.038
-#NML6,1
-BatchNorm3,97.6062,409.331,86.3211,303.765,250.71,842.032,74.8439,261.343
-#NML7,1
-Relu3,68.9786,295.295,54.9261,205.544,110.52,385.151,74.8142,272.084
-#NML8,1
-Conv4,99.7425,435.587,87.5137,315.332,233.217,831.941,19.1707,65.0227
-#NML9,1
-BatchNorm4,23.3301,99.2457,22.6355,87.6934,34.3529,118.103,20.3281,69.1027
-#NML10,1
-Relu4,17.5398,73.9864,15.9996,64.6513,31.5324,110.256,20.234,70.3149
-#Conv5,1
-Conv5,118.361,532.734,185.585,673.394,31.7649,112.807,37.938,132.277
-#NML11,1
-BatchNorm5,46.095,210.096,40.6227,149.173,142.468,500.126,37.5912,130.645
-#NML12,1
-Relu5,34.7036,158.225,27.7104,106.31,53.5821,188.448,37.6388,132.982
-#NML13,1
-Conv6,211.862,941.502,323.448,1119.27,115.938,414.557,37.786,129.099
-#NML14,1
-BatchNorm6,46.239,219.088,40.6142,145.624,149.523,514.012,37.5237,127.816
-#NML15,1
-Relu6,34.7264,164.507,27.2599,102.738,48.5245,167.544,37.4762,130.429
-#Conv7,1
-Conv7,292.43,1371.64,282.611,981.699,64.6579,227.877,37.7122,126.901
-#NML16,1
-BatchNorm7,45.4864,217.634,40.5067,143.829,148.787,503.668,37.3823,125.643
-#NML17,1
-Relu7,34.3235,164.29,26.7374,100.069,46.8449,159.109,37.4335,128.935
-#NML18,1
-Conv8,107.339,520.234,39.6826,147.475,64.2402,223.151,9.65537,29.9378
-#NML19,1
-BatchNorm8,15.8015,71.2826,15.3482,60.7097,16.8328,55.515,10.2509,31.976
-#NML20,1
-Relu8,8.79447,36.7775,7.99351,34.5499,15.1718,49.6238,10.0626,31.6603
-#Conv9,1
-Conv9,199.034,954.881,98.1388,359.856,15.2921,50.2916,18.9832,64.2926
-#NML21,1
-BatchNorm9,31.138,148.359,27.6217,104.698,70.2457,248.698,18.8196,63.4597
-#NML22,1
-Relu9,17.2023,79.3874,13.5987,54.9348,24.3837,83.7626,18.7985,64.38
-#NML23,1
-Conv10,164.266,793.877,160.813,577.662,59.6641,211.628,18.9793,63.4051
-#NML24,1
-BatchNorm10,31.2193,145.953,27.6333,103.328,74.0125,258.461,18.8411,62.7266
-#NML25,1
-Relu10,17.2051,77.3085,13.5078,53.7829,24.1874,81.846,18.7678,63.1334
-#Conv11,1
-Conv11,233.349,1117.62,166.85,596.404,35.2647,122.36,18.9644,63.7352
-#NML26,1
-BatchNorm11,31.252,148.499,27.646,103.913,74.0691,259.032,18.7909,62.9738
-#NML27,1
-Relu11,17.1975,78.9983,13.5936,54.7397,24.3747,82.9966,18.8176,63.5157
-#NML28,1
-Conv12,54.8812,263.615,24.574,94.3137,35.5476,124.286,4.91026,13.2274
-#NML29,1
-BatchNorm12,25.9899,121.522,26.0654,100.201,10.5018,32.8201,4.89614,13.1409
-#NML30,1
-Relu12,4.36155,14.8101,3.67647,18.1925,7.42945,22.2844,4.76369,12.6395
-#Conv13,1
-Conv13,119.767,577.124,63.594,236.007,8.73277,27.0192,9.60037,30.5066
-#NML31,1
-BatchNorm13,51.9324,249.047,49.3767,184.971,35.341,123.97,9.4716,29.8925
-#NML32,1
-Relu13,8.7646,36.7714,6.99017,30.535,13.4667,44.1842,9.50975,29.6265
-#NML33,1
-Pool1,26.8288,124.626,15.5609,61.4365,33.2992,116.519,2.61816,4.58362
-#FC1,2
-Mul1,2.40501,4.96909,5.46799,21.0966,6.83807,19.3828,0.310624,0.0619082
-Add1,0.43324,0.179815,0.940664,3.68353,1.92657,2.71477,0.208831,0.0318638
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_layers.txt b/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_layers.txt
deleted file mode 100644
index ba85aa142542d34722b19b9a16314100ecdd62da..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_layers.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-Conv1,2000,3,32,32,32,3,3,3,1,1
-NML1
-NML2
-NML3
-NML4
-NML5
-Conv3,2000,32,32,32,64,32,1,1,1,1
-NML6
-NML7
-NML8
-NML9
-NML10
-Conv5,2000,64,16,16,128,64,1,1,1,1
-NML11
-NML12
-NML13
-NML14
-NML15
-Conv7,2000,128,16,16,128,128,1,1,1,1
-NML16
-NML17
-NML18
-NML19
-NML20
-Conv9,2000,128,8,8,256,128,1,1,1,1
-NML21
-NML22
-NML23
-NML24
-NML25
-Conv11,2000,256,8,8,256,256,1,1,1,1
-NML26
-NML27
-NML28
-NML29
-NML30
-Conv13,2000,256,4,4,512,256,1,1,1,1
-NML31
-NML32
-NML33
-FC1,2000,1024,1024,10
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_tensors.txt b/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_tensors.txt
deleted file mode 100644
index 80f75053287bfc0751dccd29ae41a62138b59419..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/mobilenet_shallow_tensors.txt
+++ /dev/null
@@ -1,83 +0,0 @@
-#Conv1,1
-Conv1,166.211,572.966,191.843,507.609,7.53633,15.9807,38.0736,101.279
-#NML1,1
-BatchNorm1,52.3819,184.509,44.0763,129.571,56.5113,151.853,37.5819,105.16
-#NML2,1
-Relu1,38.6469,138.685,27.7776,87.353,55.2939,157.83,37.5253,110.145
-#NML3,1
-Conv2,138.263,510.748,3084.84,10928.7,59.002,175.657,38.0284,140.444
-#NML4,1
-BatchNorm2,52.1919,198.623,46.5303,185.039,59.1021,219.142,38.507,144.452
-#NML5,1
-Relu2,38.1309,146.196,27.6531,114.322,54.9847,208.258,37.5913,143.1
-#Conv3,1
-Conv3,261.935,1046.81,243.923,928.071,56.2446,214.924,75.228,289.649
-#NML6,1
-BatchNorm3,97.7271,410.676,86.2346,331.516,251.444,935.594,74.8545,283.758
-#NML7,1
-Relu3,68.9954,295.687,54.8624,220.807,110.125,417.751,74.8592,291.745
-#NML8,1
-Conv4,92.893,405.855,1583.56,6051.66,231.224,880.293,19.2796,69.3933
-#NML9,1
-BatchNorm4,23.3196,99.5467,22.5995,92.5426,34.054,124.248,20.2424,73.0214
-#NML10,1
-Relu4,17.6047,73.891,15.858,67.4597,30.9943,114.518,20.2169,73.2485
-#Conv5,1
-Conv5,116.17,524.623,186.198,709.5,31.8313,117.965,37.9227,138.363
-#NML11,1
-BatchNorm5,46.1234,211.449,40.5929,155.327,142.01,520.701,37.5993,135.493
-#NML12,1
-Relu5,34.6677,159.137,27.7158,109.967,53.3662,195.421,37.6391,138.68
-#NML13,1
-Conv6,197.299,873.476,3302.47,12220,115.346,427.277,37.842,141.551
-#NML14,1
-BatchNorm6,46.2495,219.41,40.5301,157.738,149.921,559.805,37.4612,137.044
-#NML15,1
-Relu6,34.6984,164.603,27.3118,111.071,48.6817,179.964,37.5311,140.008
-#Conv7,1
-Conv7,292.058,1372.91,281.238,1042.87,64.7019,243.41,37.6677,134.318
-#NML16,1
-BatchNorm7,45.663,218.702,40.4994,151.781,148.426,532.119,37.3944,132.668
-#NML17,1
-Relu7,34.4299,165.077,26.741,104.976,46.7743,167.757,37.3853,135.277
-#NML18,1
-Conv8,103.885,504.488,794.673,2993.53,64.3356,233.98,9.74596,31.7333
-#NML19,1
-BatchNorm8,15.8328,71.2665,15.2944,63.3415,16.9199,58.0773,10.2545,33.5749
-#NML20,1
-Relu8,8.79131,37.2098,7.99054,35.9187,15.5217,53.4902,10.0192,32.9858
-#Conv9,1
-Conv9,198.587,954.913,97.6367,372.978,15.3789,53.2605,18.9755,67.0018
-#NML21,1
-BatchNorm9,31.2316,148.915,27.6042,109.567,69.3636,255.159,18.8095,66.0347
-#NML22,1
-Relu9,17.2031,79.6361,13.5937,57.0866,24.73,88.498,18.8081,66.8284
-#NML23,1
-Conv10,162.139,785.077,1672.38,6225.71,59.6133,219.941,19.097,67.5188
-#NML24,1
-BatchNorm10,31.2579,146.853,27.6361,108.787,73.5493,270.869,18.7658,66.0488
-#NML25,1
-Relu10,17.184,77.9379,13.5283,56.7435,24.4533,86.8627,18.8086,66.1414
-#Conv11,1
-Conv11,233.12,1118.97,166.111,623.585,35.1905,128.418,18.9605,66.8737
-#NML26,1
-BatchNorm11,31.2863,148.577,27.6455,108.421,73.8267,270.154,18.8306,66.0084
-#NML27,1
-Relu11,17.1913,78.6669,13.524,56.8779,24.5173,87.138,18.7795,66.7656
-#NML28,1
-Conv12,53.1921,256.743,396.977,1509.34,35.3114,127.967,4.99161,13.5633
-#NML29,1
-BatchNorm12,25.9908,121.755,26.0905,104.322,10.5834,34.4345,4.90361,13.3056
-#NML30,1
-Relu12,4.43176,15.0166,3.6779,18.9772,7.84663,24.5065,4.76517,12.9712
-#Conv13,1
-Conv13,119.521,577.914,63.1947,243.935,8.92772,28.6239,9.58326,31.2502
-#NML31,1
-BatchNorm13,51.9366,250.554,49.3667,192.276,35.0054,127.493,9.47022,30.7329
-#NML32,1
-Relu13,8.7572,36.6268,6.98942,31.8826,13.5881,46.2214,9.4503,31.2116
-#NML33,1
-Pool1,26.725,125.046,15.6197,63.4788,33.2369,120.141,2.60648,5.14839
-#FC1,2
-Mul1,2.40563,5.37123,5.40178,21.3448,6.87904,20.2727,0.303588,0.0568318
-Add1,0.431528,0.176414,0.938673,3.94623,2.02678,2.85623,0.208607,0.0415936
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/result_test_conf.out b/llvm/projects/soc_simulator/mobilenet_shallow/result_test_conf.out
deleted file mode 100644
index 7907f1be58e22b0ac7dd4873471aac1b5205bf31..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/result_test_conf.out
+++ /dev/null
@@ -1,66 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,184.509,138.685,510.748,198.623,146.196,31.422697,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,15.711348,218.702,165.077,504.488,71.2665,37.2098,7.855674,148.915,79.6361,785.077,146.853,77.9379,15.711348,148.577,78.6669,256.743,121.755,15.0166,5.906058,250.554,36.6268,125.046,5.547644,8540.383069,1.58485780744951
-c1,8540.383069
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,52.3819,38.6469,138.263,52.1919,38.1309,16.261248,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,8.303616,45.663,34.4299,103.885,15.8328,8.79131,4.151808,31.2316,17.2031,162.139,31.2579,17.184,8.303616,31.2863,17.1913,53.1921,25.9908,4.43176,4.613120,51.9366,8.7572,26.725,2.837158,1993.172636,1.53361531230578
-c1,1993.172636
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,572.966,184.509,138.685,510.748,198.623,146.196,1046.81,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,1372.91,218.702,165.077,504.488,71.2665,37.2098,954.913,148.915,79.6361,785.077,146.853,77.9379,1118.97,148.577,78.6669,256.743,121.755,15.0166,577.914,250.554,36.6268,125.046,5.547644,13535.292944,0.999999992611907
-c1,572.966,184.509,138.685,510.748,198.623,146.196,152.598914,410.676,295.687,405.855,99.5467,73.891,524.623,211.449,159.137,873.476,219.41,164.603,91.934874,218.702,165.077,504.488,71.2665,37.2098,38.09458,148.915,79.6361,785.077,146.853,77.9379,60.851377,148.577,78.6669,256.743,121.755,15.0166,24.49524,250.554,36.6268,125.046,5.547644,8831.750929,1.53257184215853
-c1,8831.750929
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,50.323486,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,122.707169,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,70.852731,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,168.660691,0
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,16.574126,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,39.762784,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,166.211,52.3819,38.6469,138.263,52.1919,38.1309,261.935,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,292.058,45.663,34.4299,103.885,15.8328,8.79131,198.587,31.2316,17.2031,162.139,31.2579,17.184,233.12,31.2863,17.1913,53.1921,25.9908,4.43176,119.521,51.9366,8.7572,26.725,2.837158,3056.760228,0.999999967285626
-c1,166.211,52.3819,38.6469,138.263,52.1919,38.1309,32.835374,97.7271,68.9954,92.893,23.3196,17.6047,116.17,46.1234,34.6677,197.299,46.2495,34.6984,19.078877,45.663,34.4299,103.885,15.8328,8.79131,8.309738,31.2316,17.2031,162.139,31.2579,17.184,14.181538,31.2863,17.1913,53.1921,25.9908,4.43176,6.990665,51.9366,8.7572,26.725,2.837158,2032.93542,1.50361887916642
-c1,2032.93542
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss1_results.txt b/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss1_results.txt
deleted file mode 100644
index d1d1c5e872107045ebcbe8e4f248ddb20d584f2a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss1_results.txt
+++ /dev/null
@@ -1,319 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c2,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c3,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c4,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c5,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9126.405478,1.49356301213025
-c6,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c7,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c8,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9707.09813,1.40421591314215
-c9,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c10,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c11,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c12,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c13,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c14,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9126.405478,1.49356301213025
-c15,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c16,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9707.09813,1.40421591314215
-c17,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c18,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c19,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c20,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9126.405478,1.49356301213025
-c21,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c22,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8898.261135,1.5318567802196
-c23,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-
-c1,8898.261135
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c2,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c3,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c4,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c5,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2639.91512,1.16800776276444
-c6,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c7,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c8,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2798.461504,1.10183447419563
-c9,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c10,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c11,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c12,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c13,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c14,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2639.91512,1.16800776276444
-c15,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c16,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2798.461504,1.10183447419563
-c17,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c18,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c19,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c20,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2639.91512,1.16800776276444
-c21,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c22,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2580.93424,1.1946996954599
-c23,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-
-c1,2580.93424
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c2,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c3,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c4,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c5,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9186.884007,1.48373068020025
-c6,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c7,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c8,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9722.43663,1.40200056668304
-c9,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c10,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c11,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c12,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c13,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c14,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9186.884007,1.48373068020025
-c15,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c16,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9722.43663,1.40200056668304
-c17,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c18,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c19,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c20,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9186.884007,1.48373068020025
-c21,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c22,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8977.328846,1.51836497102776
-c23,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-
-c1,8977.328846
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,21.344501,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,30.978009,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,23.795528,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,32.751202,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,5.877922,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,8.255467,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c2,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c3,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c4,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c5,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c6,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c7,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c8,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c9,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c10,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c11,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c12,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c13,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c14,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c15,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c16,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c17,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c18,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c19,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c20,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c21,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c22,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c23,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c2,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c3,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c4,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c5,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c6,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c7,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c8,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c9,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c10,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c11,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c12,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c13,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c14,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c15,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c16,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c17,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c18,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c19,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c20,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c21,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c22,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c23,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c2,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c3,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c4,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c5,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2653.248052,1.16213836526211
-c6,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c7,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c8,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2805.916514,1.0989070219034
-c9,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c10,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c11,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c12,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c13,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c14,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2653.248052,1.16213836526211
-c15,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c16,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2805.916514,1.0989070219034
-c17,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c18,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c19,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c20,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2653.248052,1.16213836526211
-c21,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c22,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2596.644717,1.18747140533545
-c23,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-
-c1,2596.644717
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss2_results.txt b/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss2_results.txt
deleted file mode 100644
index 1127c71bb854e69baaaf74a21f82b84266251ef4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/results/HA_loss2_results.txt
+++ /dev/null
@@ -1,385 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,5.148905,7558.685333,1.80333762078388
-c2,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,11.801625,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7928.36376,1.71925280495388
-c3,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7572.329858,1.80008820014496
-c4,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8512.966135,1.60118828487283
-c5,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8156.508103,1.67116386887057
-c6,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9355.097804,1.45705175347997
-c7,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c8,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,24.78013,7924.38858,1.72011524868818
-c9,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8160.417826,1.67036320058689
-c10,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8505.022836,1.60268372085199
-c11,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,24.78013,7578.316558,1.79866617088515
-c12,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,11.801625,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7928.36376,1.71925280495388
-c13,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7572.329858,1.80008820014496
-c14,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8512.966135,1.60118828487283
-c15,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8156.508103,1.67116386887057
-c16,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c17,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8505.022836,1.60268372085199
-c18,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,24.78013,7924.38858,1.72011524868818
-c19,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8160.417826,1.67036320058689
-c20,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,15.711348,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8460.779527,1.61106451248314
-c21,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,24.78013,7578.316558,1.79866617088515
-c22,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,11.801625,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7928.36376,1.71925280495388
-c23,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7572.329858,1.80008820014496
-c24,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8512.966135,1.60118828487283
-c25,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8156.508103,1.67116386887057
-c26,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,9478.953787,1.43801330479033
-c27,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,24.78013,7924.38858,1.72011524868818
-c28,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8160.417826,1.67036320058689
-c29,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8505.022836,1.60268372085199
-
-c1,7558.685333
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,2.83825,2209.06946,1.3958100395897
-c2,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c3,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c4,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-c5,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c6,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2704.474512,1.14012587003719
-c7,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c8,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c9,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c10,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-c11,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c12,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c13,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c14,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-c15,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c16,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c17,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-c18,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c19,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c20,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,8.303616,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2462.633736,1.25209092189213
-c21,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c22,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c23,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c24,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-c25,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c26,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2739.480624,1.12555691412122
-c27,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c28,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c29,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2465.17324,1.25080107754208
-
-c1,2209.06946
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,5.2108132,7744.2773842,1.76012053194244
-c2,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,88.025151,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8083.654997,1.68622506050001
-c3,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7757.860001,1.75703887767233
-c4,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8623.117343,1.5807347974908
-c5,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8293.210158,1.64361705310089
-c6,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,9400.67521,1.44998751212055
-c7,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c8,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,24.78013,8079.679817,1.68705467853003
-c9,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8297.119881,1.64284255696121
-c10,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8615.174044,1.5821922548708
-c11,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,24.78013,7763.846701,1.75568402550709
-c12,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,88.025151,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8083.654997,1.68622506050001
-c13,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7757.860001,1.75703887767233
-c14,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8623.117343,1.5807347974908
-c15,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8293.210158,1.64361705310089
-c16,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c17,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8615.174044,1.5821922548708
-c18,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,24.78013,8079.679817,1.68705467853003
-c19,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8297.119881,1.64284255696121
-c20,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,76.301921,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8581.848629,1.58833629389647
-c21,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,24.78013,7763.846701,1.75568402550709
-c22,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,88.025151,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8083.654997,1.68622506050001
-c23,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7757.860001,1.75703887767233
-c24,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8623.117343,1.5807347974908
-c25,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8293.210158,1.64361705310089
-c26,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,9512.881469,1.43288463186795
-c27,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,24.78013,8079.679817,1.68705467853003
-c28,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8297.119881,1.64284255696121
-c29,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8615.174044,1.5821922548708
-
-c1,7744.2773842
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,25.140123,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,46.484624,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,9.633508,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,38.549094,0
-
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,35.450450,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,59.245978,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,8.955674,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,56.263614,0
-
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.15793,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,8.307830,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,14.185752,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,2.377545,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,13.152806,0
-
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0619082,15.4004082,0
-c2,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c3,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c4,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c5,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c6,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c7,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c8,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c9,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c10,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c11,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c12,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c13,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c14,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c15,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c16,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c17,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c18,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c19,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c20,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c21,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c22,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c23,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c24,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c25,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c26,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c27,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c28,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c29,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.310624,7.765634,0
-c2,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c3,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c4,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c5,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c6,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c7,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c8,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c9,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c10,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c11,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c12,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c13,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c14,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c15,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c16,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c17,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c18,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c19,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c20,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c21,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c22,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c23,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c24,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c25,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c26,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c27,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c28,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c29,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,3.148874,2240.023752,1.37652171303755
-c2,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c3,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c4,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-c5,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c6,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2716.087452,1.13525113272932
-c7,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c8,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c9,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c10,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-c11,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c12,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c13,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c14,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-c15,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c16,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c17,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-c18,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c19,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c20,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,16.611446,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2484.274498,1.24118383389758
-c21,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c22,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c23,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c24,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-c25,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c26,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2749.313179,1.12153150881428
-c27,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c28,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c29,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2485.781056,1.2404315893044
-
-c1,2240.023752
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss1_results.txt b/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss1_results.txt
deleted file mode 100644
index 85b21baa2464f296262fc489ac6162bbd7d6810a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss1_results.txt
+++ /dev/null
@@ -1,759 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8508.335449,1.60205973618448
-c2,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8510.352236,1.60168007937104
-c3,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8507.217192,1.60227032378945
-c4,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.012918,8734.719406,1.56053801105311
-c5,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8514.264909,1.60094403810446
-c6,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.017407,8514.266448,1.60094374872512
-c7,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8508.833425,1.60196597629403
-c8,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8506.41178,1.60242203143824
-c9,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8501.325445,1.60338075901786
-c10,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.012918,8513.144062,1.60115481960753
-c11,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8502.223179,1.60321146101484
-c12,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8511.25292,1.60151058521816
-c13,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.012918,8513.144062,1.60115481960753
-c14,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8511.847753,1.60139866694114
-c15,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8510.355186,1.60167952417022
-c16,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.012918,8503.002589,1.60306450598137
-c17,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8513.20563,1.60114323996256
-c18,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8507.959901,1.60213045235261
-c19,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8511.250768,1.60151099014698
-c20,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8506.597534,1.60238704020969
-c21,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.017407,8503.007078,1.60306365967412
-c22,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8509.233981,1.60189056679448
-c23,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8511.250768,1.60151099014698
-c24,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8511.250768,1.60151099014698
-c25,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8507.959103,1.60213060262367
-c26,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8503.110341,1.60304419183775
-c27,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.029504,8510.371772,1.60167640263135
-c28,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8511.869624,1.60139455219416
-c29,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8509.23847,1.6018897217263
-c30,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8507.959901,1.60213045235261
-c31,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8503.120913,1.60304219875978
-c32,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8508.833425,1.60196597629403
-c33,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8511.255257,1.60151014547924
-c34,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8506.34502,1.60243460766144
-c35,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8513.20563,1.60114323996256
-c36,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.012918,8743.773355,1.55892211470843
-c37,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,24.78013,8531.338386,1.59774012335443
-c38,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.039278,8737.165259,1.56010115923457
-c39,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8511.255257,1.60151014547924
-c40,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.039278,8507.064317,1.60229911716208
-c41,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8510.378596,1.601675118336
-c42,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8504.642672,1.60275536203321
-c43,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.017407,8508.774195,1.60197712765867
-c44,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,24.78013,8531.338386,1.59774012335443
-c45,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8515.16498,1.60077481492584
-c46,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8510.378596,1.601675118336
-c47,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.012918,8741.756568,1.55928176940649
-c48,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8503.926325,1.60289037366642
-c49,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8506.43814,1.60241706580591
-c50,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.039278,8507.499157,1.60221721956478
-c51,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.039278,8510.381546,1.60167456313862
-c52,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,9094.957561,1.49872735124984
-c53,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8508.835764,1.60196553592845
-c54,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8506.597534,1.60238704020969
-c55,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.019420,8510.358738,1.6016788556713
-c56,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8511.250768,1.60151099014698
-c57,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,8507.217192,1.60227032378945
-c58,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.017407,8507.221681,1.60226947832054
-c59,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,24.78013,8535.119448,1.59703232372346
-c60,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.017407,8509.641534,1.60181384731154
-c61,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8508.857635,1.60196141826785
-c62,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.019420,8511.25727,1.6015097667056
-c63,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,24.78013,8533.598487,1.59731696611147
-
-c9,8501.325445
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c2,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c3,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c4,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2539.555514,1.21416575915947
-c5,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c6,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c7,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c8,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c9,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c10,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c11,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c12,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c13,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c14,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c15,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c16,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c17,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c18,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c19,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c20,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c21,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c22,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c23,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c24,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c25,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c26,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c27,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c28,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c29,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c30,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c31,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c32,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c33,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c34,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c35,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c36,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2539.555514,1.21416575915947
-c37,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2486.947248,1.23984991981423
-c38,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2539.555514,1.21416575915947
-c39,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c40,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c41,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c42,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c43,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c44,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2486.947248,1.23984991981423
-c45,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c46,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c47,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2539.555514,1.21416575915947
-c48,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c49,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c50,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c51,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c52,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c53,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c54,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c55,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c56,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c57,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c58,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c59,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2486.947248,1.23984991981423
-c60,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c61,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c62,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c63,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2486.947248,1.23984991981423
-
-c1,2480.574634
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8618.645563,1.58155496095141
-c2,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8620.66235,1.5811849592835
-c3,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8617.527306,1.58176019208357
-c4,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.016415,8826.440338,1.54432150771853
-c5,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8624.575023,1.58046763006902
-c6,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.020904,8624.576562,1.58046734804478
-c7,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8619.143539,1.58146358570043
-c8,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8616.721894,1.58190804049283
-c9,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8611.635559,1.58284237103719
-c10,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.016415,8623.454176,1.58067305382904
-c11,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8612.533293,1.58267738225303
-c12,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8621.563034,1.58101977485328
-c13,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.016415,8623.454176,1.58067305382904
-c14,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8622.157867,1.58091070207366
-c15,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8620.6653,1.58118441820048
-c16,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.016415,8613.312703,1.58253416737082
-c17,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8623.515744,1.58066176853887
-c18,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8618.270015,1.58162387847135
-c19,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8621.560882,1.58102016948652
-c20,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8616.907648,1.5818739394261
-c21,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.020904,8613.317192,1.58253334260196
-c22,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8619.544095,1.58139009402689
-c23,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8621.560882,1.58102016948652
-c24,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8621.560882,1.58102016948652
-c25,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8618.269217,1.58162402492022
-c26,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8613.420455,1.58251437021584
-c27,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.033001,8620.681886,1.5811813760369
-c28,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8622.179738,1.5809066919395
-c29,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8619.548584,1.58138927045011
-c30,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8618.270015,1.58162387847135
-c31,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8613.431027,1.58251242786074
-c32,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8619.143539,1.58146358570043
-c33,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8621.565371,1.581019346295
-c34,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8616.655134,1.58192029677765
-c35,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8623.515744,1.58066176853887
-c36,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.016415,8835.494287,1.5427390033834
-c37,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,24.78013,8640.645003,1.57752825657281
-c38,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.042775,8828.886191,1.54389368666975
-c39,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8621.565371,1.581019346295
-c40,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.042775,8617.374431,1.58178825301889
-c41,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8620.68871,1.5811801243989
-c42,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8614.952786,1.58223289034479
-c43,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.020904,8619.084309,1.58147445345433
-c44,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,24.78013,8640.645003,1.57752825657281
-c45,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8625.475094,1.58030270778378
-c46,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8620.68871,1.5811801243989
-c47,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.016415,8833.4775,1.54309122887231
-c48,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8614.236439,1.58236446646058
-c49,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8616.748254,1.58190320118521
-c50,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.042775,8617.809271,1.58170843867463
-c51,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.042775,8620.69166,1.58117958331919
-c52,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,9160.127646,1.48806459723799
-c53,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8619.145878,1.58146315653456
-c54,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8616.907648,1.5818739394261
-c55,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.022917,8620.668852,1.58118376670034
-c56,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8621.560882,1.58102016948652
-c57,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,8617.527306,1.58176019208357
-c58,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.020904,8617.531795,1.58175936812126
-c59,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,24.78013,8644.426065,1.57683824753913
-c60,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.020904,8619.951648,1.58131532559479
-c61,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8619.167749,1.58145914359719
-c62,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.022917,8621.567384,1.58101897715193
-c63,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,24.78013,8642.905104,1.57711573634888
-
-c9,8611.635559
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,34.089465,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,42.292967,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,10.195835,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c2,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c3,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c4,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c5,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c6,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c7,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c8,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c9,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c10,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c11,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c12,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c13,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c14,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c15,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c16,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c17,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c18,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c19,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c20,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c21,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c22,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c23,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c24,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c25,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c26,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c27,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c28,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c29,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c30,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c31,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c32,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c33,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c34,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c35,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c36,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c37,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c38,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c39,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c40,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c41,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c42,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c43,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c44,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c45,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c46,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c47,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c48,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c49,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c50,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c51,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c52,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c53,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c54,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c55,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c56,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c57,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c58,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c59,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c60,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c61,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c62,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c63,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c2,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c3,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c4,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c5,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c6,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c7,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c8,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c9,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c10,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c11,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c12,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c13,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c14,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c15,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c16,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c17,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c18,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c19,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c20,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c21,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c22,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c23,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c24,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c25,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c26,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c27,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c28,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c29,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c30,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c31,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c32,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c33,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c34,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c35,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c36,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c37,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c38,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c39,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c40,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c41,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c42,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c43,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c44,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c45,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c46,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c47,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c48,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c49,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c50,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c51,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c52,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c53,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c54,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c55,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c56,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c57,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c58,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c59,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c60,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c61,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c62,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c63,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c2,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c3,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c4,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2557.206359,1.20578510942984
-c5,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c6,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c7,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c8,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c9,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c10,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c11,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c12,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c13,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c14,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c15,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c16,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c17,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c18,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c19,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c20,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c21,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c22,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c23,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c24,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c25,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c26,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c27,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c28,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c29,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c30,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c31,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c32,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c33,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c34,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c35,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c36,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2557.206359,1.20578510942984
-c37,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2506.815655,1.23002317336242
-c38,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2557.206359,1.20578510942984
-c39,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c40,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c41,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c42,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c43,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c44,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2506.815655,1.23002317336242
-c45,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c46,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c47,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2557.206359,1.20578510942984
-c48,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c49,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c50,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c51,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c52,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c53,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c54,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c55,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c56,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c57,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c58,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c59,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2506.815655,1.23002317336242
-c60,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c61,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c62,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c63,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2506.815655,1.23002317336242
-
-c1,2500.603024
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss2_results.txt b/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss2_results.txt
deleted file mode 100644
index 965e1ff061ce5ca3247947e16033a1aef2a56db0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/results/HS_loss2_results.txt
+++ /dev/null
@@ -1,880 +0,0 @@
-Compute Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.029504,8120.325055,1.67861034439082
-c2,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7779.757151,1.75209346066009
-c3,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,7890.393059,1.7275263133691
-c4,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7766.974864,1.75497692063888
-c5,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7771.751926,1.75389818915975
-c6,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7775.723575,1.7530023409686
-c7,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,7541.497044,1.80744771823519
-c8,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.039278,7536.883696,1.80855406212236
-c9,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,8475.90374,1.60818976504575
-c10,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,24.78013,8147.745734,1.67296109656726
-c11,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7771.813852,1.7538842140569
-c12,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8122.189696,1.67822497963701
-c13,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,24.78013,7913.624874,1.72245486105089
-c14,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.019420,7766.96478,1.7549791991592
-c15,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7560.980267,1.80279026572957
-c16,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8123.004882,1.67805656099005
-c17,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,24.78013,7561.844075,1.80258432857749
-c18,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,15.711348,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8364.359526,1.62963602887536
-c19,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,7775.723575,1.7530023409686
-c20,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.029504,7890.830847,1.72743046918047
-c21,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,11.801625,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,24.78013,7918.463862,1.72140226569361
-c22,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.012918,9095.853143,1.49857978584802
-c23,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.029504,7543.604052,1.80694287907275
-c24,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.029504,7886.855667,1.72830113897023
-c25,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.039278,9330.356952,1.46091534643663
-c26,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,24.78013,7564.292808,1.80200079118882
-c27,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,7.768049,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.019420,7891.626175,1.72725637656986
-c28,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.017407,9448.914288,1.44258496217418
-c29,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8356.416227,1.63118510035911
-c30,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8118.97519,1.67888943100848
-c31,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.012918,7894.847837,1.72655153256564
-c32,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.019420,8356.406143,1.63118706877353
-c33,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8356.416227,1.63118510035911
-c34,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8143.725816,1.67378690609165
-c35,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7565.819255,1.80163722730068
-c36,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,7.768049,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8356.416227,1.63118510035911
-c37,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8120.394228,1.67859604526831
-c38,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.019420,7890.879159,1.72741989296734
-c39,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8374.191618,1.6277226822621
-c40,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8380.361441,1.6265243138154
-c41,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,6.962637,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,8118.169778,1.67905599535916
-c42,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,7565.819255,1.80163722730068
-c43,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,24.78013,8150.42163,1.67241184033798
-c44,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.017407,9094.9591,1.49872709764328
-c45,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8120.394228,1.67859604526831
-c46,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,9326.375529,1.46153901013973
-c47,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8378.565973,1.62687286657863
-c48,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,9326.375529,1.46153901013973
-c49,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.887477,184.971,30.535,61.4365,0.017407,9096.260696,1.49851264279868
-c50,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.029504,9454.203161,1.44177795089612
-c51,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,24.78013,9119.721823,1.49465761347645
-c52,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.029504,9095.869729,1.49857705324028
-c53,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.017407,9682.335407,1.40780721708573
-c54,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,5.167169,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,7.862657,184.971,30.535,61.4365,0.039278,8477.681104,1.60785260462125
-c55,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,9330.347178,1.46091687681767
-c56,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,3.484413,184.971,30.535,61.4365,0.039278,9095.879503,1.49857544294059
-c57,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,0.029504,9330.347178,1.46091687681767
-c58,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,24.78013,8526.963233,1.59855991783705
-c59,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8503.71778,1.60292968291653
-c60,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8501.298287,1.60338588113129
-c61,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,2.583585,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8502.212607,1.60321345451373
-c62,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8507.46811,1.60222306666721
-c63,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8501.717579,1.6033068045378
-c64,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,5.906058,184.971,30.535,61.4365,0.012918,8507.037957,1.60230408206342
-c65,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8507.46811,1.60222306666721
-c66,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.017407,8507.472599,1.60222222124818
-c67,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,5.900812,104.698,54.9348,577.662,103.328,53.7829,15.711348,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.012918,8514.261959,1.60094459279551
-c68,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.481319,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8503.110341,1.60304419183775
-c69,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.039278,8505.699002,1.60255631448271
-c70,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.019420,8505.679144,1.6025600559315
-c71,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.017407,8507.472599,1.60222222124818
-c72,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,5.167169,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8505.689228,1.60255815600135
-c73,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,3.884025,104.698,54.9348,577.662,103.328,53.7829,11.801625,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8508.352035,1.60205661316344
-c74,489.961,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,7.855674,104.698,54.9348,577.662,103.328,53.7829,6.962637,103.913,54.7397,94.3137,100.201,18.1925,2.585881,184.971,30.535,61.4365,0.029504,8507.484696,1.60221994300935
-
-c8,7536.883696
-
-Compute Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2364.813634,1.30388344150237
-c2,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c3,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c4,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c5,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c6,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c7,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2206.26725,1.39758287679868
-c8,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2206.26725,1.39758287679868
-c9,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2458.800626,1.25404284999386
-c10,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2371.186248,1.30037922687973
-c11,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c12,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2364.813634,1.30388344150237
-c13,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c14,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c15,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c16,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2364.813634,1.30388344150237
-c17,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c18,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2423.794514,1.27215460096736
-c19,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2265.24813,1.36119363395331
-c20,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c21,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2306.626856,1.33677509576455
-c22,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c23,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2206.26725,1.39758287679868
-c24,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c25,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2698.101898,1.14281871933887
-c26,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c27,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c28,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2733.10801,1.1281813034465
-c29,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2423.794514,1.27215460096736
-c30,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2359.235122,1.30696652934253
-c31,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c32,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2423.794514,1.27215460096736
-c33,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2423.794514,1.27215460096736
-c34,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2365.607736,1.30344574577238
-c35,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c36,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2423.794514,1.27215460096736
-c37,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2364.813634,1.30388344150237
-c38,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2300.254242,1.34047849131285
-c39,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2430.167128,1.26881863703579
-c40,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2430.167128,1.26881863703579
-c41,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2359.235122,1.30696652934253
-c42,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2212.639864,1.39355770489916
-c43,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2371.186248,1.30037922687973
-c44,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c45,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2364.813634,1.30388344150237
-c46,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2698.101898,1.14281871933887
-c47,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2430.167128,1.26881863703579
-c48,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2698.101898,1.14281871933887
-c49,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c50,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2733.10801,1.1281813034465
-c51,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2645.493632,1.16554480273477
-c52,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c53,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2792.08889,1.10434928150338
-c54,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,8.303616,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2458.800626,1.25404284999386
-c55,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2698.101898,1.14281871933887
-c56,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2639.121018,1.16835921207615
-c57,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.036040,2698.101898,1.14281871933887
-c58,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,6.408654,2486.947248,1.23984991981423
-c59,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c60,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c61,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c62,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c63,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c64,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c65,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c66,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c67,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c68,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c69,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c70,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c71,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c72,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c73,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-c74,190.845,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,4.151808,27.6217,13.5987,160.813,27.6333,13.5078,8.303616,27.646,13.5936,24.574,26.0654,3.67647,4.613120,49.3767,6.99017,15.5609,0.036040,2480.574634,1.24303510300932
-
-c7,2206.26725
-
-Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,567.689,184.046,138.524,505.587,197.914,146.069,1032.03,409.331,295.295,435.587,99.2457,73.9864,532.734,210.096,158.225,941.502,219.088,164.507,1371.64,217.634,164.29,520.234,71.2826,36.7775,954.881,148.359,79.3874,793.877,145.953,77.3085,1117.62,148.499,78.9983,263.615,121.522,14.8101,577.124,249.047,36.7714,124.626,5.148905,13630.861805,0.999999992663707
-c1,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.033001,8261.718666,1.64988208762273
-c2,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7947.701609,1.7150696269293
-c3,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8046.687793,1.69397172926985
-c4,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7934.919322,1.71783241644618
-c5,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7939.696384,1.71679885150129
-c6,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7943.668033,1.71594049207242
-c7,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,7728.030684,1.76382084724884
-c8,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.042775,7723.417336,1.76487441187168
-c9,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,8587.058445,1.58737264146602
-c10,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,24.78013,8288.135848,1.64462333756594
-c11,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7939.75831,1.71678546136015
-c12,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8263.583307,1.64950979903627
-c13,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,24.78013,8068.916111,1.68930516175365
-c14,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.022917,7934.909238,1.71783459953629
-c15,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7746.51041,1.75961315580788
-c16,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8264.398493,1.64934709423931
-c17,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,24.78013,7747.374218,1.75941696444568
-c18,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,91.934874,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8487.163955,1.60605612389096
-c19,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,7943.668033,1.71594049207242
-c20,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.033001,8047.125581,1.69387957207922
-c21,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,88.025151,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,24.78013,8073.755099,1.68829268029929
-c22,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.016415,9161.023228,1.48791912398457
-c23,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.033001,7730.137692,1.76334008160976
-c24,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.033001,8043.150401,1.69471674107121
-c25,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.042775,9376.937855,1.4536580993086
-c26,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,24.78013,7749.822951,1.75886103660665
-c27,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.908078,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.022917,8047.920909,1.69371217607089
-c28,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.020904,9483.845467,1.43727159080173
-c29,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8479.220656,1.60756067063765
-c30,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8256.680742,1.65088878519594
-c31,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.016415,8051.142571,1.69303443772
-c32,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.022917,8479.210572,1.60756258244789
-c33,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8479.220656,1.60756067063765
-c34,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8280.427871,1.64615426312969
-c35,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7751.349398,1.75851467006062
-c36,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.991575,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8479.220656,1.60756067063765
-c37,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8261.787839,1.64986827374921
-c38,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.022917,8047.173893,1.69386940270672
-c39,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8495.99255,1.60438719364947
-c40,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8502.162373,1.60322292690678
-c41,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,83.186163,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,8255.87533,1.65104983966552
-c42,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,7751.349398,1.75851467006062
-c43,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,24.78013,8290.811744,1.64409252814784
-c44,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.020904,9160.129185,1.48806434722717
-c45,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8261.787839,1.64986827374921
-c46,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,9372.956432,1.45427558086535
-c47,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,24.78013,8500.366905,1.60356156351628
-c48,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,9372.956432,1.45427558086535
-c49,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.476659,184.971,30.535,61.4365,1.020904,9161.430781,1.48785293280651
-c50,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.033001,9489.13434,1.43647051174037
-c51,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,24.78013,9183.888411,1.48421464270539
-c52,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.033001,9161.039814,1.48791643011719
-c53,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.020904,9698.677404,1.40543510178354
-c54,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,81.390695,143.829,100.069,147.475,60.7097,34.5499,359.856,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,26.451839,184.971,30.535,61.4365,1.042775,8588.835809,1.58704415236489
-c55,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,9376.928081,1.45365961452275
-c56,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,22.073595,184.971,30.535,61.4365,1.042775,9161.049588,1.4879148426468
-c57,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,596.404,103.913,54.7397,94.3137,100.201,18.1925,236.007,184.971,30.535,61.4365,1.033001,9376.928081,1.45365961452275
-c58,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,24.78013,8636.26985,1.57832743579304
-c59,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8614.027894,1.58240277539084
-c60,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8611.608401,1.58284736276819
-c61,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,32.822491,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8612.522721,1.58267932501308
-c62,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8617.778224,1.58171413704607
-c63,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8612.027693,1.58277029900895
-c64,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,24.49524,184.971,30.535,61.4365,1.016415,8617.348071,1.58179309162324
-c65,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8617.778224,1.58171413704607
-c66,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.020904,8617.782713,1.58171331313174
-c67,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,36.139718,104.698,54.9348,577.662,103.328,53.7829,60.851377,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.016415,8624.572073,1.58046817066157
-c68,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,33.720225,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8613.420455,1.58251437021584
-c69,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.042775,8616.009116,1.58203890725736
-c70,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.022917,8615.989258,1.58204255351635
-c71,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.020904,8617.782713,1.58171331313174
-c72,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,50.307198,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8615.999342,1.58204070192418
-c73,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,34.122931,104.698,54.9348,577.662,103.328,53.7829,56.941654,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8618.662149,1.58155191736183
-c74,505.2995,126.823,85.2148,454.401,157.667,98.9958,792.454,303.765,205.544,315.332,87.6934,64.6513,673.394,149.173,106.31,1119.27,145.624,102.738,981.699,143.829,100.069,147.475,60.7097,34.5499,38.09458,104.698,54.9348,577.662,103.328,53.7829,52.102666,103.913,54.7397,94.3137,100.201,18.1925,21.175063,184.971,30.535,61.4365,1.033001,8617.79481,1.58171109284382
-
-c8,7723.417336
-
-Leakage Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,51.294058,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,72.638559,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,72.638559,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,38.80397,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,51.039182,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,51.294058,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,51.294058,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,41.66055,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,63.005051,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,59.893595,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,72.638559,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,12.744964,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,9.888384,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,41.66055,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,50.514963,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,41.66055,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,41.66055,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0,50.260087,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,41.66055,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,51.294058,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,60.148471,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.405674,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.405674,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,21.344501,0,0,0,0,0,0,0,0,0,0.254876,50.514963,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,72.383683,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,51.039182,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,51.294058,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,12.744964,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.405674,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,12.744964,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,9.888384,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0,22.123596,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,0.254876,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28.915586,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,38.80397,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,12.744964,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,9.633508,0,0,0,0.254876,22.378472,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254876,12.744964,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0,43.468097,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-c74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.490088,0,0,0,0,0,21.344501,0,0,0,0,0,9.633508,0,0,0,0.254876,43.722973,0
-
-c0,0
-
-Memory Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,74.761053,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,98.556581,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,98.556581,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,57.012235,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,74.012432,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,74.761053,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,74.761053,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,65.805379,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,89.600907,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,80.059142,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,98.556581,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,18.497439,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,9.704295,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,65.805379,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,71.852089,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,65.805379,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,65.805379,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0,71.103468,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,65.805379,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,74.761053,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,80.807763,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.056758,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.056758,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,23.795528,0,0,0,0,0,0,0,0,0,0.748621,71.852089,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,97.80796,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,74.012432,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,74.761053,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,18.497439,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.056758,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,18.497439,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,9.704295,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0,26.704492,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,0.748621,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,47.307940,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,57.012235,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,18.497439,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,8.955674,0,0,0,0.748621,27.453113,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.748621,18.497439,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0,50.50002,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-c74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.748818,0,0,0,0,0,23.795528,0,0,0,0,0,8.955674,0,0,0,0.748621,51.248641,0
-
-c0,0
-
-Memory Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,17.470719,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,23.348641,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,23.348641,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,13.312789,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,17.310736,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,17.470719,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,17.470719,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,15.093174,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,20.971096,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,19.030728,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,23.348641,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,4.317913,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,2.537528,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,15.093174,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,16.813166,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,15.093174,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,15.093174,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0,16.653183,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,15.093174,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,17.470719,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,19.190711,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.933191,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.933191,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,5.877922,0,0,0,0,0,0,0,0,0,0.159983,16.813166,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,23.188658,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,17.310736,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,17.470719,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,4.317913,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.933191,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,4.317913,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,2.537528,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0,6.535475,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,0.159983,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.775261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,13.312789,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,4.317913,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,2.377545,0,0,0,0.159983,6.695458,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.159983,4.317913,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0,12.413397,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-c74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.157930,0,0,0,0,0,5.877922,0,0,0,0,0,2.377545,0,0,0,0.159983,12.57338,0
-
-c0,0
-
-Patch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
-Quantization Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c2,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c3,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c4,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c5,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c6,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c7,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c8,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c9,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c10,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c11,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c12,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c13,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c14,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c15,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c16,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c17,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c18,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c19,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c20,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c21,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c22,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c23,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c24,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c25,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c26,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c27,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c28,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c29,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c30,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c31,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c32,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c33,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c34,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c35,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c36,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c37,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c38,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c39,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c40,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c41,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c42,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c43,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c44,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c45,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c46,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c47,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c48,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c49,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c50,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c51,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c52,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c53,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c54,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c55,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c56,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c57,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c58,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c59,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c60,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c61,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c62,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c63,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c64,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c65,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c66,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c67,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c68,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c69,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c70,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c71,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c72,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c73,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-c74,15.3385,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.3385,0
-
-c0,0
-
-Quantization Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c2,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c3,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c4,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c5,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c6,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c7,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c8,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c9,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c10,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c11,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c12,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c13,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c14,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c15,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c16,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c17,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c18,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c19,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c20,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c21,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c22,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c23,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c24,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c25,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c26,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c27,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c28,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c29,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c30,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c31,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c32,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c33,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c34,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c35,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c36,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c37,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c38,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c39,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c40,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c41,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c42,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c43,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c44,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c45,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c46,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c47,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c48,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c49,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c50,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c51,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c52,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c53,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c54,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c55,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c56,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c57,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c58,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c59,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c60,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c61,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c62,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c63,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c64,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c65,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c66,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c67,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c68,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c69,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c70,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c71,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c72,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c73,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-c74,7.45501,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.45501,0
-
-c0,0
-
-Time
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,165.401,52.2864,38.6453,137.297,52.1549,38.1169,258.954,97.6062,68.9786,99.7425,23.3301,17.5398,118.361,46.095,34.7036,211.862,46.239,34.7264,292.43,45.4864,34.3235,107.339,15.8015,8.79447,199.034,31.138,17.2023,164.266,31.2193,17.2051,233.349,31.252,17.1975,54.8812,25.9899,4.36155,119.767,51.9324,8.7646,26.8288,2.83825,3083.44147,0.999999967568706
-c1,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2389.739363,1.29028352995818
-c2,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c3,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c4,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c5,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c6,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c7,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2237.070901,1.37833867079841
-c8,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2237.070901,1.37833867079841
-c9,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2479.568425,1.24353952670052
-c10,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2395.951994,1.28693786395881
-c11,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c12,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2389.739363,1.29028352995818
-c13,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c14,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c15,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c16,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2389.739363,1.29028352995818
-c17,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c18,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2446.342698,1.26042902594062
-c19,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2293.674236,1.34432400520176
-c20,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c21,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2333.112594,1.32159988582189
-c22,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c23,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2237.070901,1.37833867079841
-c24,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c25,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2709.874821,1.13785379764397
-c26,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c27,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c28,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2743.100548,1.12407157653808
-c29,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2446.342698,1.26042902594062
-c30,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2383.503298,1.29365935563059
-c31,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c32,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2446.342698,1.26042902594062
-c33,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2446.342698,1.26042902594062
-c34,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2389.715929,1.29029618271854
-c35,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c36,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2446.342698,1.26042902594062
-c37,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2389.739363,1.29028352995818
-c38,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2326.899963,1.32512844837204
-c39,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2452.555329,1.25723620087854
-c40,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2452.555329,1.25723620087854
-c41,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2383.503298,1.29365935563059
-c42,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2243.283532,1.37452144972455
-c43,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2395.951994,1.28693786395881
-c44,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c45,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2389.739363,1.29028352995818
-c46,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2709.874821,1.13785379764397
-c47,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,6.408654,2452.555329,1.25723620087854
-c48,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2709.874821,1.13785379764397
-c49,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c50,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2743.100548,1.12407157653808
-c51,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2659.484117,1.15941333672521
-c52,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c53,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2799.703883,1.10134553107145
-c54,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,19.078877,40.5067,26.7374,39.6826,15.3482,7.99351,98.1388,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2479.568425,1.24353952670052
-c55,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2709.874821,1.13785379764397
-c56,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2653.271486,1.16212810112233
-c57,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,166.85,27.646,13.5936,24.574,26.0654,3.67647,63.594,49.3767,6.99017,15.5609,0.196023,2709.874821,1.13785379764397
-c58,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,6.408654,2506.815655,1.23002317336242
-c59,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c60,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c61,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c62,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c63,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c64,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c65,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c66,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c67,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c68,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c69,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c70,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c71,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c72,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c73,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-c74,198.30001,44.208,27.6261,146.888,46.4921,27.6025,243.635,86.3211,54.9261,87.5137,22.6355,15.9996,185.585,40.6227,27.7104,323.448,40.6142,27.2599,282.611,40.5067,26.7374,39.6826,15.3482,7.99351,8.309738,27.6217,13.5987,160.813,27.6333,13.5078,14.181538,27.646,13.5936,24.574,26.0654,3.67647,6.990665,49.3767,6.99017,15.5609,0.196023,2500.603024,1.23307910815839
-
-c7,2237.070901
-
-Unpatch Energy
-Configuration,Conv1,NML1,NML2,NML3,NML4,NML5,Conv3,NML6,NML7,NML8,NML9,NML10,Conv5,NML11,NML12,NML13,NML14,NML15,Conv7,NML16,NML17,NML18,NML19,NML20,Conv9,NML21,NML22,NML23,NML24,NML25,Conv11,NML26,NML27,NML28,NML29,NML30,Conv13,NML31,NML32,NML33,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c60,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c70,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-
-c0,0
-
diff --git a/llvm/projects/soc_simulator/mobilenet_shallow/test_conf.txt b/llvm/projects/soc_simulator/mobilenet_shallow/test_conf.txt
deleted file mode 100644
index 0142d9b480bac8f9f4fe69826d9afecf6ff22f30..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/mobilenet_shallow/test_conf.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9 9
-9,9,9,9,9,9,7,9,9,9,9,9,9,9,9,9,9,9,7,9,9,9,9,9,7,9,9,9,9,9,7,9,9,9,9,9,6,9,9,9,9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs1.txt
deleted file mode 100644
index 8ea283e0a2031578fd063967089b6f6cba54a549..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs1.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,8 8 
-8 8,8 8 8,8 8,8 8
-8 8,9 9 9,8 8,8 8 
-9 9,9 9 9,9 9,9 9
-9 9,9 9 9,9 9,8 8
-8 8,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,9 9
-8 8,8 8 8,9 9,9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs2.txt
deleted file mode 100644
index 834a35f35200d56d029fda983afeb6aac188c6ec..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_confs2.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-9 9,9 9 9,9 9,9 9
-7,8 8 8,8 8,8 8
-7,8 8 8,8 8,8 8 
-7,8 8 8,9 9,9 9
-7,9 9 9,9 9,9 9
-7,8 8 8,8 8,8 8
-7,8 8 8,8 8,8 8
-7,9 9 9,9 9,9 9
-7,9 9 9,9 9,9 9
-7,8 8 8,8 8,9 9 
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp16.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp16.csv
deleted file mode 100644
index 2beaa67647d3eac8f2d103e02829ffe3687264ec..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp16.csv
+++ /dev/null
@@ -1,27 +0,0 @@
-Add1,32.6334,133.401,91.0356,42.3651,4087.88,2789.67,1298.21,0.30589,1.78751,1.34011,0.538346,42.1225,33.8662,11.5884
-Add1_f2h,150.837,630.018,462.198,167.819,4177.07,3064.48,1112.6,3.34,12.5306,8.92834,3.71182,20.7606,19.9311,3.16457
-Add1_h2f,36.4014,148.497,98.4511,50.0455,4079.39,2704.57,1374.82,0.232494,1.69279,1.38382,0.450555,35.7399,31.9227,8.46863
-ClipRelu1,32.7163,162.53,121.022,41.5078,4967.92,3699.21,1268.71,0.509468,2.88278,2.1792,0.771439,50.356,41.5967,12.18
-ClipRelu1_f2h,146.28,768.006,611.086,156.92,5250.53,4177.78,1072.75,3.35138,15.912,12.4068,3.55674,20.529,19.3936,3.22652
-ClipRelu1_h2f,36.4137,177.541,128.57,48.9711,4875.78,3530.89,1344.88,0.247154,3.76564,2.8014,1.02577,101.77,75.6996,27.7919
-ClipRelu2,33.9773,143.994,89.1194,54.8744,4237.92,2622.89,1615.02,0.358201,1.94994,1.31654,0.703475,32.9941,25.2563,10.9937
-ClipRelu2_f2h,45.9326,186.794,119.276,67.5184,4066.86,2596.9,1469.96,0.535982,2.01803,1.37983,0.80847,31.9019,27.4409,8.03545
-ClipRelu2_h2f,36.3771,153.809,93.3831,60.4256,4228.16,2567.08,1661.09,0.272555,1.57442,1.09211,0.576984,27.6439,21.9052,9.42432
-ClipRelu3,32.7232,158.115,116.963,41.1516,4831.86,3574.3,1257.56,0.305072,2.35216,1.8683,0.53915,53.4613,44.534,10.9329
-ClipRelu3_f2h,164.164,848.865,673.112,175.753,5171.2,4100.61,1070.59,3.58947,15.7511,11.9108,3.87994,20.1933,19.4671,3.41194
-ClipRelu3_h2f,36.4919,173.755,124.9,48.8553,4761.5,3422.69,1338.8,0.203696,1.48147,1.21164,0.365454,33.735,29.09,7.55146
-ClipRelu4,32.7295,118.22,75.8625,42.3577,3612.06,2317.88,1294.18,0.276555,1.37079,0.949542,0.473891,31.4617,23.0321,10.4641
-ClipRelu4_f2h,163.003,595.319,411.376,183.943,3652.48,2524,1128.48,3.34937,10.2919,6.68183,3.72234,18.2987,17.2686,3.28064
-ClipRelu4_h2f,36.6193,132.943,83.2401,49.7026,3630.37,2273.09,1357.27,0.290084,1.45682,1.02302,0.49639,23.3497,18.4425,7.63415
-Conv1,2915.2,13346.4,10579.6,2766.84,4578.36,3629.24,949.121,19.1516,21.5507,9.90059,14.4656,23.7835,21.855,2.90162
-Conv1_f2h,135.389,213.981,45.5551,168.426,1580.59,336.366,1244.22,2.76286,4.12087,1.96844,2.56877,13.6602,9.16935,15.2463
-Conv1_h2f,36.5344,219.647,187.103,32.5449,6012.68,5121.79,890.894,0.35844,3.81895,3.22822,0.598408,117.302,99.2913,18.2
-Conv2,1441.62,6486.13,4951.37,1534.75,4499.3,3434.69,1064.61,7.26356,12.662,11.4595,6.58994,20.9029,19.7516,2.06059
-Conv2_f2h,46.0741,220.445,153.984,66.4611,4784.9,3342.35,1442.55,0.634155,4.11911,2.88656,1.31165,81.2524,58.8475,24.3049
-Conv2_h2f,36.4601,170.807,136.465,34.3419,4684.78,3742.87,941.904,0.250998,1.42323,1.12292,0.309893,24.1214,18.7186,5.77896
-Conv3,2555.58,13947.5,11383.1,2564.39,5457.74,4454.29,1003.45,9.97427,18.0324,15.5434,10.2457,19.44,19.0993,1.46803
-Conv3_f2h,49.4496,206.48,121.717,84.7629,4175.81,2461.68,1714.14,0.747212,2.66778,1.58538,1.30637,32.2439,28.9397,7.96794
-Conv3_h2f,36.6494,221.625,188.876,32.7499,6047.14,5153.55,893.593,0.293397,2.27039,1.92317,0.360122,33.9749,28.4655,6.08276
-Conv4,1291.92,4870.81,3421.82,1448.98,3770.3,2648.72,1121.58,7.14123,18.8651,16.1964,7.55982,20.3334,19.1008,2.49458
-Conv4_f2h,51.784,240.28,166.251,74.0293,4640.36,3210.75,1429.61,0.781657,3.06736,2.07678,1.09645,37.4368,31.5405,8.47733
-Conv4_h2f,36.7521,150.132,114.253,35.879,4084.99,3108.74,976.248,0.253787,1.30348,1.00169,0.314015,22.3553,16.931,5.92274
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp32.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp32.csv
deleted file mode 100644
index e790a0bb8ea6443dcfca9f588a5f502aa4aff1f0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_fp32.csv
+++ /dev/null
@@ -1,9 +0,0 @@
-Add1,36.9674,189.809,150.644,39.1653,5134.5,4075.05,1059.46,0.417224,2.39475,1.88794,0.518749,28.8232,22.4334,7.04925
-ClipRelu1,35.8466,224.605,189.527,35.0779,6266.3,5287.65,978.65,0.364512,4.0914,3.39016,0.708936,123.31,102.442,21.0743
-ClipRelu2,35.6596,186.419,140.926,45.4927,5227.72,3951.98,1275.75,0.363001,2.20865,1.6516,0.571887,31.3186,23.0189,9.04323
-ClipRelu3,35.7018,224.339,189.337,35.0012,6283.64,5303.27,980.364,0.36817,2.76771,2.30071,0.481815,34.9818,28.6265,7.17676
-ClipRelu4,35.7814,168.038,128.569,39.4683,4696.23,3593.19,1103.04,0.344254,1.9021,1.47145,0.444171,29.0004,22.4544,7.22865
-Conv1,3221.85,16081.5,13019.4,3062.09,4991.49,4041.07,950.42,16.1668,28.1619,19.2301,13.2614,19.7491,18.2005,2.65225
-Conv2,1531.77,7672.48,6141.04,1531.44,5008.96,4009.17,999.786,6.88078,12.1701,8.68228,6.44409,18.5451,17.5047,2.29932
-Conv3,2958.33,17316.3,14500,2816.36,5853.44,4901.43,952.008,7.26292,17.3729,14.9615,7.27995,12.8672,12.488,1.42891
-Conv4,1236.26,5658.48,4357.46,1301.02,4577.33,3524.93,1052.4,10.0982,13.1562,7.31823,8.97215,28.8055,27.1761,2.57428
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_layers.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_layers.txt
deleted file mode 100644
index 10199705ce8b6061351a9018c085549a2d330230..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_layers.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Conv1,2000,1,240,300,1,1,9,9,1,1
-Conv2,2000,1,240,300,1,1,5,5,1,1
-Conv3,2000,1,240,300,1,1,9,9,1,1
-Conv4,2000,1,240,300,1,1,3,3,1,1
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_ops.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_ops.txt
deleted file mode 100644
index 0807a77c985bd73c6c538a1259b2ffc44eeda73a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_ops.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1
-ClipRelu1
-#Conv2,3
-Conv2
-Add1
-ClipRelu2
-#Conv3,2
-Conv3
-ClipRelu3
-#Conv4,2
-Conv4
-ClipRelu4
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs1.txt
deleted file mode 100644
index 39ea99038c7ed190c73ff101597f7b46ca3ecf46..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs1.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,9 9
-9 9,8 8 8,9 9,8 8
-9 9,8 8 8,8 8,8 8
-8 8,9 9 9,9 9,9 9
-8 8,8 8 8,9 9,9 9
-8 8,9 9 9,8 8,9 9
-9 9,8 8 8,9 9,9 9
-9 9,8 8 8,8 8,9 9
-9 9,8 8 8,8 8,8 8
-8 8,8 8 8,8 8,8 8
-8 8,9 9 9,9 9,9 9
-8 8,9 9 9,8 8,8 8
-9 9,9 9 9,8 8,9 9
-9 9,9 9 9,9 9,9 9
-9 9,9 9 9,9 9,8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs2.txt
deleted file mode 100644
index 4a23019eb2d256d728fcd7c6d66a76d5a25c6206..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_confs2.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-9 9,9 9 9,9 9,9 9
-5,9 9 9,9 9,8 8
-5,8 8 8,9 9,9 9
-5,9 9 9,9 9,9 9
-5,8 8 8,9 9,8 8
-7,9 9 9,7,9 9
-7,8 8 8,7,8 8
-7,9 9 9,7,8 8
-7,8 8 8,7,9 9
-7,9 9 9,9 9,9 9
-7,8 8 8,9 9,8 8
-7,8 8 8,9 9,9 9
-9 9,9 9 9,9 9,9 9
-6,9 9 9,9 9,9 9
-7,9 9 9,8 8,9 9
-7,9 9 9,9 9,8 8
-8 8,8 8 8,9 9,8 8
-8 8,8 8 8,7,9 9
-7,8 8 8,8 8,9 9
-6,8 8 8,8 8,9 9
-8 8,8 8 8,9 9,9 9
-8 8,8 8 8,8 8,9 9
-8 8,9 9 9,8 8,9 9
-6,9 9 9,8 8,8 8
-8 8,9 9 9,9 9,9 9
-9 9,9 9 9,7,8 8
-7,8 8 8,8 8,8 8
-8 8,9 9 9,7,9 9
-6,8 8 8,9 9,9 9
-6,8 8 8,8 8,8 8
-8 8,9 9 9,9 9,8 8
-8 8,8 8 8,7,8 8
-9 9,8 8 8,9 9,8 8
-9 9,9 9 9,8 8,8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results1.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results1.csv
deleted file mode 100644
index 40a0db161373ec37f4f22800a62a9e942b25d337..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results1.csv
+++ /dev/null
@@ -1,220 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,13508.93,6763.525,14105.615,5826.518,40204.588,1.186978209584
-c2,16306.105,6763.525,17540.639,4989.03,45599.299,1.04655051594861
-c3,16306.105,6763.525,14105.615,4989.03,42164.275,1.13181051700329
-c4,13508.93,8048.708,17540.639,5826.518,44924.795,1.06226349822573
-c5,13508.93,6763.525,17540.639,5826.518,43639.612,1.09354707119406
-c6,13508.93,8048.708,14105.615,5826.518,41489.771,1.15021049127938
-c7,16306.105,6763.525,17540.639,5826.518,46436.787,1.02767596511861
-c8,16306.105,6763.525,14105.615,5826.518,43001.763,1.10976775275524
-c9,16306.105,6763.525,14105.615,4989.03,42164.275,1.13181051700329
-c10,13508.93,6763.525,14105.615,4989.03,39367.1,1.21222975222399
-c11,13508.93,8048.708,17540.639,5826.518,44924.795,1.06226349822573
-c12,13508.93,8048.708,14105.615,4989.03,40652.283,1.17390626948576
-c13,16306.105,8048.708,14105.615,5826.518,44286.946,1.07756289838192
-c14,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c15,16306.105,8048.708,17540.639,4989.03,46884.482,1.01786279516139
-c10,39367.1
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,2947.9163,1508.2307,2588.3032,1272.0414,8316.4916,1.0975982576884
-c2,3257.6966,1508.2307,2994.0318,1324.6495,9084.6086,1.00479471394294
-c3,3257.6966,1508.2307,2588.3032,1324.6495,8678.88,1.05176781967526
-c4,2947.9163,1604.397,2994.0318,1272.0414,8818.3865,1.0351288976149
-c5,2947.9163,1508.2307,2994.0318,1272.0414,8722.2202,1.04654164719962
-c6,2947.9163,1604.397,2588.3032,1272.0414,8412.6579,1.08505145460567
-c7,3257.6966,1508.2307,2994.0318,1272.0414,9032.0005,1.01064727564345
-c8,3257.6966,1508.2307,2588.3032,1272.0414,8626.2719,1.05818212085128
-c9,3257.6966,1508.2307,2588.3032,1324.6495,8678.88,1.05176781967526
-c10,2947.9163,1508.2307,2588.3032,1324.6495,8369.0997,1.09069876308561
-c11,2947.9163,1604.397,2994.0318,1272.0414,8818.3865,1.0351288976149
-c12,2947.9163,1604.397,2588.3032,1324.6495,8465.266,1.07830831212736
-c13,3257.6966,1604.397,2588.3032,1272.0414,8722.4382,1.04651549097229
-c14,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c15,3257.6966,1604.397,2994.0318,1324.6495,9180.7749,0.994269743022784
-c1,8316.4916
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,13722.911,6763.525,14105.615,5976.65,40568.701,1.17632481952941
-c2,16306.105,6983.97,17762.264,5229.31,46281.649,1.03112077741413
-c3,16306.105,6983.97,14105.615,4989.03,42384.72,1.1259239152083
-c4,13722.911,8219.515,17540.639,5826.518,45309.583,1.05324231067577
-c5,13722.911,6763.525,17762.264,5826.518,44075.218,1.08273928200936
-c6,13722.911,8219.515,14312.095,5976.65,42231.171,1.1300176802343
-c7,16306.105,6983.97,17762.264,5826.518,46878.857,1.01798492864708
-c8,16306.105,6983.97,14105.615,5976.65,43372.34,1.10028580173381
-c9,16306.105,6983.97,14105.615,4989.03,42384.72,1.1259239152083
-c10,13722.911,6763.525,14105.615,4989.03,39581.081,1.20567626436055
-c11,13722.911,8219.515,17540.639,5826.518,45309.583,1.05324231067577
-c12,13722.911,8219.515,14312.095,4989.03,41243.551,1.15707713635745
-c13,16306.105,8048.708,14312.095,5976.65,44643.558,1.06895534386181
-c14,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c15,16306.105,8048.708,17540.639,5229.31,47124.762,1.0126729106607
-c10,39581.081
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,213.981,0,0,150.132,364.113,0
-c2,0,220.445,221.625,240.28,682.35,0
-c3,0,220.445,0,0,220.445,0
-c4,213.981,170.807,0,0,384.788,0
-c5,213.981,0,221.625,0,435.606,0
-c6,213.981,170.807,206.48,150.132,741.4,0
-c7,0,220.445,221.625,0,442.07,0
-c8,0,220.445,0,150.132,370.577,0
-c9,0,220.445,0,0,220.445,0
-c10,213.981,0,0,0,213.981,0
-c11,213.981,170.807,0,0,384.788,0
-c12,213.981,170.807,206.48,0,591.268,0
-c13,0,0,206.48,150.132,356.612,0
-c14,0,0,0,0,0,0
-c15,0,0,0,240.28,240.28,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.389,0,0,36.7521,172.1411,0
-c2,0,46.0741,36.6494,51.784,134.5075,0
-c3,0,46.0741,0,0,46.0741,0
-c4,135.389,36.4601,0,0,171.8491,0
-c5,135.389,0,36.6494,0,172.0384,0
-c6,135.389,36.4601,49.4496,36.7521,258.0508,0
-c7,0,46.0741,36.6494,0,82.7235,0
-c8,0,46.0741,0,36.7521,82.8262,0
-c9,0,46.0741,0,0,46.0741,0
-c10,135.389,0,0,0,135.389,0
-c11,135.389,36.4601,0,0,171.8491,0
-c12,135.389,36.4601,49.4496,0,221.2987,0
-c13,0,0,49.4496,36.7521,86.2017,0
-c14,0,0,0,0,0,0
-c15,0,0,0,51.784,51.784,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,3083.3053,1508.2307,2588.3032,1308.7935,8488.6327,1.07534004769296
-c2,3257.6966,1554.3048,3030.6812,1376.4335,9219.1161,0.990134694256267
-c3,3257.6966,1554.3048,2588.3032,1324.6495,8724.9541,1.04621372109896
-c4,3083.3053,1640.8571,2994.0318,1272.0414,8990.2356,1.01534232300495
-c5,3083.3053,1508.2307,3030.6812,1272.0414,8894.2586,1.02629877406197
-c6,3083.3053,1640.8571,2637.7528,1308.7935,8670.7087,1.05275900858301
-c7,3257.6966,1554.3048,3030.6812,1272.0414,9114.724,1.00147483345108
-c8,3257.6966,1554.3048,2588.3032,1308.7935,8709.0981,1.04811848372544
-c9,3257.6966,1554.3048,2588.3032,1324.6495,8724.9541,1.04621372109896
-c10,3083.3053,1508.2307,2588.3032,1324.6495,8504.4887,1.07333515448924
-c11,3083.3053,1640.8571,2994.0318,1272.0414,8990.2356,1.01534232300495
-c12,3083.3053,1640.8571,2637.7528,1324.6495,8686.5647,1.05083735747876
-c13,3257.6966,1604.397,2637.7528,1308.7935,8808.6399,1.03627424891924
-c14,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c15,3257.6966,1604.397,2994.0318,1376.4335,9232.5589,0.988693037325838
-c1,8488.6327
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results2.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results2.csv
deleted file mode 100644
index 3762ea7ee951b3a472bd4148816e772b22b8dd67..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_promise_results2.csv
+++ /dev/null
@@ -1,418 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,7.585986,8048.708,17540.639,4989.03,30585.962986,1.56025722864498
-c2,7.585986,6763.525,17540.639,5826.518,30138.267986,1.58343438527472
-c3,7.585986,8048.708,17540.639,5826.518,31423.450986,1.51867374049381
-c4,7.585986,6763.525,17540.639,4989.03,29300.779986,1.62869281500125
-c5,15.343114,8048.708,15.343114,5826.518,13905.912228,3.4317755552011
-c6,15.343114,6763.525,15.343114,4989.03,11783.241228,4.04998664387874
-c7,15.343114,8048.708,15.343114,4989.03,13068.424228,3.65170037352953
-c8,15.343114,6763.525,15.343114,5826.518,12620.729228,3.78123710284519
-c9,15.343114,8048.708,17540.639,5826.518,31431.208114,1.51829893636554
-c10,15.343114,6763.525,17540.639,4989.03,29308.537114,1.62826174679248
-c11,15.343114,6763.525,17540.639,5826.518,30146.025114,1.58302693841832
-c12,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c13,11.525024,8048.708,17540.639,5826.518,31427.390024,1.51848339336191
-c14,15.343114,8048.708,14105.615,5826.518,27996.184114,1.70458836944414
-c15,15.343114,8048.708,17540.639,4989.03,30593.720114,1.5598616208225
-c16,13508.93,6763.525,17540.639,4989.03,42802.124,1.11494396606359
-c17,13508.93,6763.525,15.343114,5826.518,26114.316114,1.82742560092062
-c18,15.343114,6763.525,14105.615,5826.518,26711.001114,1.78660356523767
-c19,11.525024,6763.525,14105.615,5826.518,26707.183024,1.78685898016386
-c20,13508.93,6763.525,17540.639,5826.518,43639.612,1.09354707119406
-c21,13508.93,6763.525,14105.615,5826.518,40204.588,1.186978209584
-c22,13508.93,8048.708,14105.615,5826.518,41489.771,1.15021049127938
-c23,11.525024,8048.708,14105.615,4989.03,27154.878024,1.75739952807309
-c24,13508.93,8048.708,17540.639,5826.518,44924.795,1.06226349822573
-c25,16306.105,8048.708,15.343114,4989.03,29359.186114,1.62545275104538
-c26,15.343114,6763.525,14105.615,4989.03,25873.513114,1.84443332473991
-c27,13508.93,8048.708,15.343114,5826.518,27399.499114,1.74170957021054
-c28,11.525024,6763.525,17540.639,5826.518,30142.207024,1.58322745921292
-c29,11.525024,6763.525,14105.615,4989.03,25869.695024,1.84470554334933
-c30,13508.93,8048.708,17540.639,4989.03,44087.307,1.08244238850324
-c31,13508.93,6763.525,15.343114,4989.03,25276.828114,1.88797303189996
-c32,16306.105,6763.525,17540.639,4989.03,45599.299,1.04655051594861
-c33,16306.105,8048.708,14105.615,4989.03,43449.458,1.09833291568716
-c6,11783.241228
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,7.919790,1604.397,2994.0318,1324.6495,5930.99809,1.53906079677964
-c2,7.919790,1508.2307,2994.0318,1272.0414,5782.22369,1.57866024068224
-c3,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c4,7.919790,1508.2307,2994.0318,1324.6495,5834.83179,1.56442670021809
-c5,7.919790,1604.397,7.919790,1272.0414,2892.27798,3.15604742957496
-c6,7.919790,1508.2307,7.919790,1324.6495,2848.71978,3.20430480514637
-c7,7.919790,1604.397,7.919790,1324.6495,2944.88608,3.09966709816948
-c8,7.919790,1508.2307,7.919790,1272.0414,2796.11168,3.2645929484264
-c9,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c10,7.919790,1508.2307,2994.0318,1324.6495,5834.83179,1.56442670021809
-c11,7.919790,1508.2307,2994.0318,1272.0414,5782.22369,1.57866024068224
-c12,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c13,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c14,7.919790,1604.397,2588.3032,1272.0414,5472.66139,1.66795750416494
-c15,7.919790,1604.397,2994.0318,1324.6495,5930.99809,1.53906079677964
-c16,2947.9163,1508.2307,2994.0318,1324.6495,8774.8283,1.04026727177935
-c17,2947.9163,1508.2307,7.919790,1272.0414,5736.10819,1.59135189548522
-c18,7.919790,1508.2307,2588.3032,1272.0414,5376.49509,1.69779130779805
-c19,7.919790,1508.2307,2588.3032,1272.0414,5376.49509,1.69779130779805
-c20,2947.9163,1508.2307,2994.0318,1272.0414,8722.2202,1.04654164719962
-c21,2947.9163,1508.2307,2588.3032,1272.0414,8316.4916,1.0975982576884
-c22,2947.9163,1604.397,2588.3032,1272.0414,8412.6579,1.08505145460567
-c23,7.919790,1604.397,2588.3032,1324.6495,5525.26949,1.65207627452618
-c24,2947.9163,1604.397,2994.0318,1272.0414,8818.3865,1.0351288976149
-c25,3257.6966,1604.397,7.919790,1324.6495,6194.66289,1.47355341440455
-c26,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c27,2947.9163,1604.397,7.919790,1272.0414,5832.27449,1.56511266044488
-c28,7.919790,1508.2307,2994.0318,1272.0414,5782.22369,1.57866024068224
-c29,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c30,2947.9163,1604.397,2994.0318,1324.6495,8870.9946,1.0289902213559
-c31,2947.9163,1508.2307,7.919790,1324.6495,5788.71629,1.57688962198405
-c32,3257.6966,1508.2307,2994.0318,1324.6495,9084.6086,1.00479471394294
-c33,3257.6966,1604.397,2588.3032,1324.6495,8775.0463,1.04024142823906
-c8,2796.11168
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,82.701434,8048.708,17540.639,5229.31,30901.358434,1.54433242627481
-c2,82.701434,6763.525,17762.264,5826.518,30435.008434,1.5679959460727
-c3,82.701434,8048.708,17540.639,5826.518,31498.566434,1.51505211986356
-c4,82.701434,6763.525,17762.264,5229.31,29837.800434,1.59937961732873
-c5,90.458562,8048.708,90.458562,5826.518,14056.143124,3.39509702195675
-c6,90.458562,6763.525,90.458562,4989.03,11933.472124,3.99900122145707
-c7,90.458562,8048.708,90.458562,4989.03,13218.655124,3.61019855585273
-c8,90.458562,6763.525,90.458562,5826.518,12770.960124,3.73675660740982
-c9,90.458562,8048.708,17540.639,5826.518,31506.323562,1.5146791009945
-c10,90.458562,6763.525,17762.264,5229.31,29845.557562,1.59896392422785
-c11,90.458562,6763.525,17762.264,5826.518,30442.765562,1.56759640467123
-c12,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c13,86.640472,8048.708,17540.639,5826.518,31502.505472,1.51486267944402
-c14,90.458562,8048.708,14312.095,5976.65,28427.911562,1.67870122038513
-c15,90.458562,8048.708,17540.639,5229.31,30909.115562,1.54394485179885
-c16,13722.911,6763.525,17762.264,5229.31,43478.01,1.09761164069466
-c17,13722.911,6763.525,90.458562,5826.518,26403.412562,1.80741673854463
-c18,90.458562,6763.525,14105.615,5976.65,26936.248562,1.77166355266549
-c19,86.640472,6763.525,14105.615,5976.65,26932.430472,1.77191471346866
-c20,13722.911,6763.525,17762.264,5826.518,44075.218,1.08273928200936
-c21,13722.911,6763.525,14105.615,5976.65,40568.701,1.17632481952941
-c22,13722.911,8219.515,14312.095,5976.65,42231.171,1.1300176802343
-c23,86.640472,8048.708,14312.095,4989.03,27436.473472,1.73936238105695
-c24,13722.911,8219.515,17540.639,5826.518,45309.583,1.05324231067577
-c25,16306.105,8048.708,90.458562,4989.03,29434.301562,1.6213046447645
-c26,90.458562,6763.525,14105.615,4989.03,25948.628562,1.83909410480276
-c27,13722.911,8219.515,90.458562,5826.518,27859.402562,1.71295740181437
-c28,86.640472,6763.525,17762.264,5826.518,30438.947472,1.56779303512774
-c29,86.640472,6763.525,14105.615,4989.03,25944.810472,1.83936474955428
-c30,13722.911,8219.515,17540.639,5229.31,44712.375,1.06731011030546
-c31,13722.911,6763.525,90.458562,4989.03,25565.924562,1.86662405647044
-c32,16306.105,6983.97,17762.264,5229.31,46281.649,1.03112077741413
-c33,16306.105,8048.708,14312.095,4989.03,43655.938,1.09313811767568
-c6,11933.472124
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,28.951489,0,0,0,28.951489,0
-c2,28.951489,0,0,0,28.951489,0
-c3,28.951489,0,0,0,28.951489,0
-c4,28.951489,0,0,0,28.951489,0
-c5,28.951489,0,28.951489,0,57.902978,0
-c6,28.951489,0,28.951489,0,57.902978,0
-c7,28.951489,0,28.951489,0,57.902978,0
-c8,28.951489,0,28.951489,0,57.902978,0
-c9,28.951489,0,0,0,28.951489,0
-c10,28.951489,0,0,0,28.951489,0
-c11,28.951489,0,0,0,28.951489,0
-c12,0,0,0,0,0,0
-c13,28.951489,0,0,0,28.951489,0
-c14,28.951489,0,0,0,28.951489,0
-c15,28.951489,0,0,0,28.951489,0
-c16,0,0,0,0,0,0
-c17,0,0,28.951489,0,28.951489,0
-c18,28.951489,0,0,0,28.951489,0
-c19,28.951489,0,0,0,28.951489,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,28.951489,0,0,0,28.951489,0
-c24,0,0,0,0,0,0
-c25,0,0,28.951489,0,28.951489,0
-c26,28.951489,0,0,0,28.951489,0
-c27,0,0,28.951489,0,28.951489,0
-c28,28.951489,0,0,0,28.951489,0
-c29,28.951489,0,0,0,28.951489,0
-c30,0,0,0,0,0,0
-c31,0,0,28.951489,0,28.951489,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,46.163959,0,0,0,46.163959,0
-c2,46.163959,0,0,0,46.163959,0
-c3,46.163959,0,0,0,46.163959,0
-c4,46.163959,0,0,0,46.163959,0
-c5,46.163959,0,46.163959,0,92.327918,0
-c6,46.163959,0,46.163959,0,92.327918,0
-c7,46.163959,0,46.163959,0,92.327918,0
-c8,46.163959,0,46.163959,0,92.327918,0
-c9,46.163959,0,0,0,46.163959,0
-c10,46.163959,0,0,0,46.163959,0
-c11,46.163959,0,0,0,46.163959,0
-c12,0,0,0,0,0,0
-c13,46.163959,0,0,0,46.163959,0
-c14,46.163959,0,0,0,46.163959,0
-c15,46.163959,0,0,0,46.163959,0
-c16,0,0,0,0,0,0
-c17,0,0,46.163959,0,46.163959,0
-c18,46.163959,0,0,0,46.163959,0
-c19,46.163959,0,0,0,46.163959,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,46.163959,0,0,0,46.163959,0
-c24,0,0,0,0,0,0
-c25,0,0,46.163959,0,46.163959,0
-c26,46.163959,0,0,0,46.163959,0
-c27,0,0,46.163959,0,46.163959,0
-c28,46.163959,0,0,0,46.163959,0
-c29,46.163959,0,0,0,46.163959,0
-c30,0,0,0,0,0,0
-c31,0,0,46.163959,0,46.163959,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,10.930367,0,0,0,10.930367,0
-c2,10.930367,0,0,0,10.930367,0
-c3,10.930367,0,0,0,10.930367,0
-c4,10.930367,0,0,0,10.930367,0
-c5,10.930367,0,10.930367,0,21.860734,0
-c6,10.930367,0,10.930367,0,21.860734,0
-c7,10.930367,0,10.930367,0,21.860734,0
-c8,10.930367,0,10.930367,0,21.860734,0
-c9,10.930367,0,0,0,10.930367,0
-c10,10.930367,0,0,0,10.930367,0
-c11,10.930367,0,0,0,10.930367,0
-c12,0,0,0,0,0,0
-c13,10.930367,0,0,0,10.930367,0
-c14,10.930367,0,0,0,10.930367,0
-c15,10.930367,0,0,0,10.930367,0
-c16,0,0,0,0,0,0
-c17,0,0,10.930367,0,10.930367,0
-c18,10.930367,0,0,0,10.930367,0
-c19,10.930367,0,0,0,10.930367,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,10.930367,0,0,0,10.930367,0
-c24,0,0,0,0,0,0
-c25,0,0,10.930367,0,10.930367,0
-c26,10.930367,0,0,0,10.930367,0
-c27,0,0,10.930367,0,10.930367,0
-c28,10.930367,0,0,0,10.930367,0
-c29,10.930367,0,0,0,10.930367,0
-c30,0,0,0,0,0,0
-c31,0,0,10.930367,0,10.930367,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,240.28,240.28,0
-c2,0,0,221.625,0,221.625,0
-c3,0,0,0,0,0,0
-c4,0,0,221.625,240.28,461.905,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,221.625,240.28,461.905,0
-c11,0,0,221.625,0,221.625,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,206.48,150.132,356.612,0
-c15,0,0,0,240.28,240.28,0
-c16,213.981,0,221.625,240.28,675.886,0
-c17,213.981,0,0,0,213.981,0
-c18,0,0,0,150.132,150.132,0
-c19,0,0,0,150.132,150.132,0
-c20,213.981,0,221.625,0,435.606,0
-c21,213.981,0,0,150.132,364.113,0
-c22,213.981,170.807,206.48,150.132,741.4,0
-c23,0,0,206.48,0,206.48,0
-c24,213.981,170.807,0,0,384.788,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,213.981,170.807,0,0,384.788,0
-c28,0,0,221.625,0,221.625,0
-c29,0,0,0,0,0,0
-c30,213.981,170.807,0,240.28,625.068,0
-c31,213.981,0,0,0,213.981,0
-c32,0,220.445,221.625,240.28,682.35,0
-c33,0,0,206.48,0,206.48,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,51.784,51.784,0
-c2,0,0,36.6494,0,36.6494,0
-c3,0,0,0,0,0,0
-c4,0,0,36.6494,51.784,88.4334,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,36.6494,51.784,88.4334,0
-c11,0,0,36.6494,0,36.6494,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,49.4496,36.7521,86.2017,0
-c15,0,0,0,51.784,51.784,0
-c16,135.389,0,36.6494,51.784,223.8224,0
-c17,135.389,0,0,0,135.389,0
-c18,0,0,0,36.7521,36.7521,0
-c19,0,0,0,36.7521,36.7521,0
-c20,135.389,0,36.6494,0,172.0384,0
-c21,135.389,0,0,36.7521,172.1411,0
-c22,135.389,36.4601,49.4496,36.7521,258.0508,0
-c23,0,0,49.4496,0,49.4496,0
-c24,135.389,36.4601,0,0,171.8491,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,135.389,36.4601,0,0,171.8491,0
-c28,0,0,36.6494,0,36.6494,0
-c29,0,0,0,0,0,0
-c30,135.389,36.4601,0,51.784,223.6331,0
-c31,135.389,0,0,0,135.389,0
-c32,0,46.0741,36.6494,51.784,134.5075,0
-c33,0,0,49.4496,0,49.4496,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,18.850157,1604.397,2994.0318,1376.4335,5993.712457,1.52295705094154
-c2,18.850157,1508.2307,3030.6812,1272.0414,5829.803457,1.56577605244341
-c3,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c4,18.850157,1508.2307,3030.6812,1376.4335,5934.195557,1.53823151908252
-c5,18.850157,1604.397,18.850157,1272.0414,2914.138714,3.13237199139135
-c6,18.850157,1508.2307,18.850157,1324.6495,2870.580514,3.17990261464226
-c7,18.850157,1604.397,18.850157,1324.6495,2966.746814,3.07682693017204
-c8,18.850157,1508.2307,18.850157,1272.0414,2817.972414,3.23926750692218
-c9,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c10,18.850157,1508.2307,3030.6812,1376.4335,5934.195557,1.53823151908252
-c11,18.850157,1508.2307,3030.6812,1272.0414,5829.803457,1.56577605244341
-c12,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c13,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c14,18.850157,1604.397,2637.7528,1308.7935,5569.793457,1.6388698623359
-c15,18.850157,1604.397,2994.0318,1376.4335,5993.712457,1.52295705094154
-c16,3083.3053,1508.2307,3030.6812,1376.4335,8998.6507,1.01439282431095
-c17,3083.3053,1508.2307,18.850157,1272.0414,5882.427557,1.55176864591571
-c18,18.850157,1508.2307,2588.3032,1308.7935,5424.177557,1.68286648727664
-c19,18.850157,1508.2307,2588.3032,1308.7935,5424.177557,1.68286648727664
-c20,3083.3053,1508.2307,3030.6812,1272.0414,8894.2586,1.02629877406197
-c21,3083.3053,1508.2307,2588.3032,1308.7935,8488.6327,1.07534004769296
-c22,3083.3053,1640.8571,2637.7528,1308.7935,8670.7087,1.05275900858301
-c23,18.850157,1604.397,2637.7528,1324.6495,5585.649457,1.63421759758639
-c24,3083.3053,1640.8571,2994.0318,1272.0414,8990.2356,1.01534232300495
-c25,3257.6966,1604.397,18.850157,1324.6495,6205.593257,1.47095793663362
-c26,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c27,3083.3053,1640.8571,18.850157,1272.0414,6015.053957,1.5175535769919
-c28,18.850157,1508.2307,3030.6812,1272.0414,5829.803457,1.56577605244341
-c29,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c30,3083.3053,1640.8571,2994.0318,1376.4335,9094.6277,1.00368778148348
-c31,3083.3053,1508.2307,18.850157,1324.6495,5935.035657,1.53801378352841
-c32,3257.6966,1554.3048,3030.6812,1376.4335,9219.1161,0.990134694256267
-c33,3257.6966,1604.397,2637.7528,1324.6495,8824.4959,1.03441225425225
-c8,2817.972414
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results1.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results1.csv
deleted file mode 100644
index 1e8d11387a221361bf025c7ff0716709f5cd82b0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results1.csv
+++ /dev/null
@@ -1,143 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,13508.93,6763.525,14105.615,4989.03,39367.1,1.21222975222399
-c2,13508.93,6763.525,14105.615,4989.03,39367.1,1.21222975222399
-c3,13508.93,8048.708,14105.615,4989.03,40652.283,1.17390626948576
-c4,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c5,16306.105,8048.708,17540.639,4989.03,46884.482,1.01786279516139
-c6,13508.93,8048.708,17540.639,5826.518,44924.795,1.06226349822573
-c7,13508.93,6763.525,14105.615,5826.518,40204.588,1.186978209584
-c8,13508.93,6763.525,17540.639,5826.518,43639.612,1.09354707119406
-c1,39367.1
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,2947.9163,1508.2307,2588.3032,1324.6495,8369.0997,1.09069876308561
-c2,2947.9163,1508.2307,2588.3032,1324.6495,8369.0997,1.09069876308561
-c3,2947.9163,1604.397,2588.3032,1324.6495,8465.266,1.07830831212736
-c4,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c5,3257.6966,1604.397,2994.0318,1324.6495,9180.7749,0.994269743022784
-c6,2947.9163,1604.397,2994.0318,1272.0414,8818.3865,1.0351288976149
-c7,2947.9163,1508.2307,2588.3032,1272.0414,8316.4916,1.0975982576884
-c8,2947.9163,1508.2307,2994.0318,1272.0414,8722.2202,1.04654164719962
-c7,8316.4916
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,13722.911,6763.525,14105.615,4989.03,39581.081,1.20567626436055
-c2,13722.911,6763.525,14105.615,4989.03,39581.081,1.20567626436055
-c3,13722.911,8219.515,14312.095,4989.03,41243.551,1.15707713635745
-c4,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c5,16306.105,8048.708,17540.639,5229.31,47124.762,1.0126729106607
-c6,13722.911,8219.515,17540.639,5826.518,45309.583,1.05324231067577
-c7,13722.911,6763.525,14105.615,5976.65,40568.701,1.17632481952941
-c8,13722.911,6763.525,17762.264,5826.518,44075.218,1.08273928200936
-c1,39581.081
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,213.981,0,0,0,213.981,0
-c2,213.981,0,0,0,213.981,0
-c3,213.981,170.807,206.48,0,591.268,0
-c4,0,0,0,0,0,0
-c5,0,0,0,240.28,240.28,0
-c6,213.981,170.807,0,0,384.788,0
-c7,213.981,0,0,150.132,364.113,0
-c8,213.981,0,221.625,0,435.606,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.389,0,0,0,135.389,0
-c2,135.389,0,0,0,135.389,0
-c3,135.389,36.4601,49.4496,0,221.2987,0
-c4,0,0,0,0,0,0
-c5,0,0,0,51.784,51.784,0
-c6,135.389,36.4601,0,0,171.8491,0
-c7,135.389,0,0,36.7521,172.1411,0
-c8,135.389,0,36.6494,0,172.0384,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,3083.3053,1508.2307,2588.3032,1324.6495,8504.4887,1.07333515448924
-c2,3083.3053,1508.2307,2588.3032,1324.6495,8504.4887,1.07333515448924
-c3,3083.3053,1640.8571,2637.7528,1324.6495,8686.5647,1.05083735747876
-c4,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c5,3257.6966,1604.397,2994.0318,1376.4335,9232.5589,0.988693037325838
-c6,3083.3053,1640.8571,2994.0318,1272.0414,8990.2356,1.01534232300495
-c7,3083.3053,1508.2307,2588.3032,1308.7935,8488.6327,1.07534004769296
-c8,3083.3053,1508.2307,3030.6812,1272.0414,8894.2586,1.02629877406197
-c7,8488.6327
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results2.csv b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results2.csv
deleted file mode 100644
index 870bb0106874567d837a62a781422af940cd1631..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_results2.csv
+++ /dev/null
@@ -1,154 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,15.343114,6763.525,14105.615,4989.03,25873.513114,1.84443332473991
-c2,15.343114,6763.525,14105.615,4989.03,25873.513114,1.84443332473991
-c3,15.343114,6763.525,17540.639,5826.518,30146.025114,1.58302693841832
-c4,15.343114,8048.708,17540.639,5826.518,31431.208114,1.51829893636554
-c5,15.343114,6763.525,14105.615,4989.03,25873.513114,1.84443332473991
-c6,15.343114,6763.525,14105.615,4989.03,25873.513114,1.84443332473991
-c7,15.343114,8048.708,17540.639,5826.518,31431.208114,1.51829893636554
-c8,15.343114,8048.708,17540.639,5826.518,31431.208114,1.51829893636554
-c9,15.343114,6763.525,14105.615,5826.518,26711.001114,1.78660356523767
-c1,25873.513114
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c2,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c3,7.919790,1508.2307,2994.0318,1272.0414,5782.22369,1.57866024068224
-c4,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c5,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c6,7.919790,1508.2307,2588.3032,1324.6495,5429.10319,1.68133968215587
-c7,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c8,7.919790,1604.397,2994.0318,1272.0414,5878.38999,1.55283447682867
-c9,7.919790,1508.2307,2588.3032,1272.0414,5376.49509,1.69779130779805
-c9,5376.49509
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16306.105,8048.708,17540.639,5826.518,47721.97,0.999999997904529
-c1,90.458562,6763.525,14105.615,4989.03,25948.628562,1.83909410480276
-c2,90.458562,6763.525,14105.615,4989.03,25948.628562,1.83909410480276
-c3,90.458562,6763.525,17762.264,5826.518,30442.765562,1.56759640467123
-c4,90.458562,8048.708,17540.639,5826.518,31506.323562,1.5146791009945
-c5,90.458562,6763.525,14105.615,4989.03,25948.628562,1.83909410480276
-c6,90.458562,6763.525,14105.615,4989.03,25948.628562,1.83909410480276
-c7,90.458562,8048.708,17540.639,5826.518,31506.323562,1.5146791009945
-c8,90.458562,8048.708,17540.639,5826.518,31506.323562,1.5146791009945
-c9,90.458562,6763.525,14105.615,5976.65,26936.248562,1.77166355266549
-c1,25948.628562
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,28.951489,0,0,0,28.951489,0
-c2,28.951489,0,0,0,28.951489,0
-c3,28.951489,0,0,0,28.951489,0
-c4,28.951489,0,0,0,28.951489,0
-c5,28.951489,0,0,0,28.951489,0
-c6,28.951489,0,0,0,28.951489,0
-c7,28.951489,0,0,0,28.951489,0
-c8,28.951489,0,0,0,28.951489,0
-c9,28.951489,0,0,0,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,46.163959,0,0,0,46.163959,0
-c2,46.163959,0,0,0,46.163959,0
-c3,46.163959,0,0,0,46.163959,0
-c4,46.163959,0,0,0,46.163959,0
-c5,46.163959,0,0,0,46.163959,0
-c6,46.163959,0,0,0,46.163959,0
-c7,46.163959,0,0,0,46.163959,0
-c8,46.163959,0,0,0,46.163959,0
-c9,46.163959,0,0,0,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,10.930367,0,0,0,10.930367,0
-c2,10.930367,0,0,0,10.930367,0
-c3,10.930367,0,0,0,10.930367,0
-c4,10.930367,0,0,0,10.930367,0
-c5,10.930367,0,0,0,10.930367,0
-c6,10.930367,0,0,0,10.930367,0
-c7,10.930367,0,0,0,10.930367,0
-c8,10.930367,0,0,0,10.930367,0
-c9,10.930367,0,0,0,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,221.625,0,221.625,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,150.132,150.132,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,36.6494,0,36.6494,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,36.7521,36.7521,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3257.6966,1604.397,2994.0318,1272.0414,9128.1668,0.999999989044898
-c1,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c2,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c3,18.850157,1508.2307,3030.6812,1272.0414,5829.803457,1.56577605244341
-c4,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c5,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c6,18.850157,1508.2307,2588.3032,1324.6495,5440.033557,1.67796145677413
-c7,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c8,18.850157,1604.397,2994.0318,1272.0414,5889.320357,1.5499524718765
-c9,18.850157,1508.2307,2588.3032,1308.7935,5424.177557,1.68286648727664
-c9,5424.177557
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_tensors.txt b/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_tensors.txt
deleted file mode 100644
index 71dce21c4710dee6c0180112e14825189281bce6..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEMO/pipeline_GEMO_tensors.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1,3221.85,16081.5,2915.2,13346.4,135.389,213.981,36.5344,219.647
-ClipRelu1,35.8466,224.605,32.7163,162.53,146.28,768.006,36.4137,177.541
-#Conv2,3
-Conv2,1531.77,7672.48,1441.62,6486.13,46.0741,220.445,36.4601,170.807
-Add1,36.9674,189.809,32.6334,133.401,150.837,630.018,36.4014,148.497
-ClipRelu2,35.6596,186.419,33.9773,143.994,45.9326,186.794,36.3771,153.809
-#Conv3,2
-Conv3,2958.33,17316.3,2555.58,13947.5,49.4496,206.48,36.6494,221.625
-ClipRelu3,35.7018,224.339,32.7232,158.115,164.164,848.865,36.4919,173.755
-#Conv4,2
-Conv4,1236.26,5658.48,1291.92,4870.81,51.784,240.28,36.7521,150.132
-ClipRelu4,35.7814,168.038,32.7295,118.22,163.003,595.319,36.6193,132.943
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs1.txt
deleted file mode 100644
index 3c5d575879b4b851938894d007277b2e65fcacc4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs1.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-9 9,9 9 9,9 9
-8 8,8 8 8,8 8
-8 8,8 8 8,9 9
-9 9,8 8 8,8 8
-8 8,9 9 9,9 9
-8 8,8 8 8,8 8
-9 9,9 9 9,8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs2.txt
deleted file mode 100644
index 6ebcf19aec794b6fd5bcb2db1348a9bda7df8ee9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_confs2.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-9 9,9 9 9,9 9
-8 8,8 8 8,9 9
-8 8,8 8 8,8 8
-9 9,9 9 9,9 9
-9 9,9 9 9,8 8
-8 8,9 9 9,8 8
-9 9,8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp16.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp16.csv
deleted file mode 100644
index a506b2186159108b62d813ff72c9f5e080517b35..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp16.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-Add1,32.6705,133.276,90.8676,42.4081,4079.4,2781.35,1298.05,0.274066,1.81083,1.3749,0.524813,44.8557,36.2428,11.5976
-Add1_f2h,152.652,636.419,466.532,169.887,4169.31,3056.39,1112.92,3.38804,12.9973,9.35441,3.72695,20.7109,19.0044,3.41117
-Add1_h2f,36.4091,148.094,97.9338,50.1598,4067.47,2689.79,1377.67,0.253791,1.95641,1.60863,0.461607,43.3346,38.2765,8.27312
-ClipRelu1,32.6587,162.45,120.933,41.5169,4974.22,3702.98,1271.23,0.330531,2.05041,1.57201,0.558148,43.869,36.143,11.1024
-ClipRelu1_f2h,146.743,769.608,612.403,157.206,5244.94,4173.62,1071.31,2.99355,13.4267,10.3296,3.13177,19.476,18.332,2.95025
-ClipRelu1_h2f,36.4146,177.696,128.645,49.0517,4879.8,3532.76,1347.03,0.279016,1.96727,1.57977,0.479455,37.5859,32.5144,8.48889
-ClipRelu2,34.0918,144.473,89.3027,55.1698,4237.69,2619.43,1618.26,0.352182,2.37489,1.66636,0.820247,49.3303,37.5143,16.197
-ClipRelu2_f2h,45.6929,185.575,118.34,67.2347,4061.46,2589.99,1471.47,0.516908,2.23797,1.56401,0.785261,33.2568,27.0842,8.72852
-ClipRelu2_h2f,36.3378,153.377,92.9806,60.3967,4220.92,2558.82,1662.1,0.172363,1.47966,1.20127,0.437181,40.4136,33.6048,10.527
-ClipRelu3,32.8262,117.875,75.0274,42.8476,3590.86,2285.58,1305.28,0.384277,1.99531,1.51246,0.60442,42.3563,36.5341,9.89428
-ClipRelu3_f2h,163.696,595.184,407.98,187.204,3636.27,2492.66,1143.61,4.28169,13.549,8.69333,4.92486,18.018,17.0336,3.84045
-ClipRelu3_h2f,36.522,132.298,82.3713,49.9263,3622.73,2255.58,1367.15,0.257881,2.4465,1.66724,0.87317,76.2326,50.7005,27.8681
-Conv1,2918.68,13346.8,10575.4,2771.38,4573.05,3623.51,949.542,19.0663,24.1442,11.5013,15.9164,22.9092,21.5007,2.65872
-Conv1_f2h,135.408,213.93,45.3082,168.622,1580.02,334.561,1245.45,3.68648,5.50091,2.08695,4.00579,14.7526,11.0198,13.018
-Conv1_h2f,36.4759,219.457,186.947,32.5105,6016.51,5125.23,891.287,0.249261,1.92932,1.63679,0.307051,34.8454,29.3924,6.0193
-Conv2,1440.02,6478.39,4945.38,1533.01,4498.87,3434.29,1064.58,5.09304,12.7265,13.2649,4.32559,18.6424,17.5201,1.96949
-Conv2_f2h,45.9656,219.948,153.574,66.3745,4785.2,3341.18,1444.02,0.569206,2.71891,1.96107,0.854423,37.0203,30.7544,8.97458
-Conv2_h2f,36.459,170.667,136.354,34.3128,4681.07,3739.93,941.134,0.267004,2.1182,1.68416,0.440793,47.758,38.0809,9.90734
-Conv3,1223.57,4572.94,3095.04,1477.9,3737.52,2529.64,1207.88,8.78168,18.7734,14.433,9.07594,21.7904,19.8837,3.12811
-Conv3_f2h,49.5704,206.506,121.53,84.976,4166.12,2451.84,1714.27,0.66723,2.51126,1.59248,1.12596,33.159,28.3058,8.69624
-Conv3_h2f,36.6344,149.378,113.062,36.3157,4077.55,3086.25,991.301,0.271434,1.29255,0.954645,0.352811,21.212,15.9903,5.95645
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp32.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp32.csv
deleted file mode 100644
index 4bd84e896cea5e41ca36173932788c4f289d729e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_fp32.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-Add1,36.8972,189.335,150.257,39.0772,5131.39,4072.31,1059.08,0.22475,1.5892,1.25644,0.353501,27.5054,21.4014,6.89302
-ClipRelu1,35.8282,224.935,189.773,35.1614,6278.15,5296.76,981.388,0.266349,2.10296,1.76652,0.359258,34.6377,29.1787,6.49807
-ClipRelu2,35.6699,186.473,140.964,45.5092,5227.75,3951.91,1275.84,0.283793,1.78712,1.33686,0.468235,30.0455,22.8704,8.03787
-ClipRelu3,35.6563,165.27,125.599,39.6716,4635.07,3522.47,1112.6,0.291613,1.72066,1.28598,0.444424,26.11,19.6517,6.98899
-Conv1,3222.23,16082.5,13014.3,3068.26,4991.2,4038.98,952.22,15.3047,25.6007,16.0853,15.2341,18.3593,17.1448,3.32665
-Conv2,1532.25,7670.01,6138.24,1531.76,5005.79,4006.1,999.684,6.87238,10.0722,7.16904,6.53694,18.9873,18.0292,2.42076
-Conv3,1164.4,5092.98,3767.29,1325.69,4374.05,3235.53,1138.53,8.17306,13.3788,8.398,7.54984,21.8335,20.1398,2.64523
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_layers.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_layers.txt
deleted file mode 100644
index b7d5c3cdcc00c398df81022401291a084d3f8ab8..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_layers.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Conv1,2000,1,240,300,1,1,9,9,1,1
-Conv2,2000,1,240,300,1,1,5,5,1,1
-Conv3,2000,1,240,300,1,1,3,3,1,1
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_ops.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_ops.txt
deleted file mode 100644
index 7c5a3a1c50a80a638bf569b41f5d3ef2add5f224..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_ops.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-#Conv1,2
-Conv1
-ClipRelu1
-#Conv2,3
-Conv2
-Add1
-ClipRelu2
-#Conv3,2
-Conv3
-ClipRelu3
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs1.txt
deleted file mode 100644
index 0d24c059750ac51d2e5a76925dfc5de8f4e3f059..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs1.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-9 9,9 9 9,9 9
-9 9,9 9 9,8 8
-9 9,8 8 8,9 9
-9 9,9 9 9,9 9
-9 9,8 8 8,8 8
-9 9,9 9 9,8 8
-8 8,8 8 8,9 9
-8 8,8 8 8,8 8
-8 8,9 9 9,8 8
-8 8,9 9 9,9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs2.txt
deleted file mode 100644
index ba2839b706b3718fd6bcf065409164737a878b08..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_confs2.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-9 9,9 9 9,9 9
-8 8,9 9 9,8 8
-9 9,8 8 8,9 9
-9 9,9 9 9,9 9
-9 9,8 8 8,8 8
-8 8,9 9 9,9 9
-9 9,9 9 9,8 8
-8 8,8 8 8,9 9
-8 8,8 8 8,8 8
-8 8,9 9 9,8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results1.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results1.csv
deleted file mode 100644
index d74e5b5acea78b727e36599c2cecb813971fb82c..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results1.csv
+++ /dev/null
@@ -1,154 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,16307.435,8045.818,4690.815,29044.068,1.0195370324173
-c2,16307.435,6756.139,5258.25,28321.824,1.04553657615577
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,6756.139,4690.815,27754.389,1.06691244016609
-c5,16307.435,8045.818,4690.815,29044.068,1.0195370324173
-c6,13509.25,6756.139,5258.25,25523.639,1.16015991622449
-c7,13509.25,6756.139,4690.815,24956.204,1.18653874128237
-c8,13509.25,8045.818,4690.815,26245.883,1.1282342029482
-c9,13509.25,8045.818,5258.25,26813.318,1.10435802423125
-c7,24956.204
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,3258.0582,1604.8171,1256.3962,6119.2715,0.990793021836128
-c2,3258.0582,1506.7823,1200.0563,5964.8968,1.01643527149648
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1506.7823,1256.3962,6021.2367,1.00692462385801
-c5,3258.0582,1604.8171,1256.3962,6119.2715,0.990793021836128
-c6,2951.3387,1506.7823,1200.0563,5658.1773,1.07153437783693
-c7,2951.3387,1506.7823,1256.3962,5714.5172,1.06097003153705
-c8,2951.3387,1604.8171,1256.3962,5812.552,1.04307565690465
-c9,2951.3387,1604.8171,1200.0563,5756.2121,1.05328493622942
-c6,5658.1773
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,16307.435,8045.818,4897.321,29250.574,1.0123392073867
-c2,16307.435,6976.087,5407.628,28691.15,1.032077936813
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,6976.087,4690.815,27974.337,1.05852384970366
-c5,16307.435,8045.818,4897.321,29250.574,1.0123392073867
-c6,13723.18,6756.139,5407.628,25886.947,1.14387775760549
-c7,13723.18,6756.139,4690.815,25170.134,1.17645392282594
-c8,13723.18,8216.485,4897.321,26836.986,1.10338407187982
-c9,13723.18,8216.485,5258.25,27197.915,1.08874165137754
-c7,25170.134
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,206.506,206.506,0
-c2,0,219.948,149.378,369.326,0
-c3,0,0,0,0,0
-c4,0,219.948,0,219.948,0
-c5,0,0,206.506,206.506,0
-c6,213.93,0,149.378,363.308,0
-c7,213.93,0,0,213.93,0
-c8,213.93,170.667,206.506,591.103,0
-c9,213.93,170.667,0,384.597,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,49.5704,49.5704,0
-c2,0,45.9656,36.6344,82.6,0
-c3,0,0,0,0,0
-c4,0,45.9656,0,45.9656,0
-c5,0,0,49.5704,49.5704,0
-c6,135.408,0,36.6344,172.0424,0
-c7,135.408,0,0,135.408,0
-c8,135.408,36.459,49.5704,221.4374,0
-c9,135.408,36.459,0,171.867,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,3258.0582,1604.8171,1305.9666,6168.8419,0.982831396881295
-c2,3258.0582,1552.7479,1236.6907,6047.4968,1.00255224603753
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1552.7479,1256.3962,6067.2023,0.999296084139207
-c5,3258.0582,1604.8171,1305.9666,6168.8419,0.982831396881295
-c6,3086.7467,1506.7823,1236.6907,5830.2197,1.0399147558725
-c7,3086.7467,1506.7823,1256.3962,5849.9252,1.03641179828399
-c8,3086.7467,1641.2761,1305.9666,6033.9894,1.00479651149542
-c9,3086.7467,1641.2761,1200.0563,5928.0791,1.02274807664513
-c6,5830.2197
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results2.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results2.csv
deleted file mode 100644
index 2f278d260729ab866e55bc325c2b7db70ab87d4b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_promise_results2.csv
+++ /dev/null
@@ -1,154 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13509.25,8045.818,4690.815,26245.883,1.1282342029482
-c2,16307.435,6756.139,5258.25,28321.824,1.04553657615577
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,6756.139,4690.815,27754.389,1.06691244016609
-c5,13509.25,8045.818,5258.25,26813.318,1.10435802423125
-c6,16307.435,8045.818,4690.815,29044.068,1.0195370324173
-c7,13509.25,6756.139,5258.25,25523.639,1.16015991622449
-c8,13509.25,6756.139,4690.815,24956.204,1.18653874128237
-c9,13509.25,8045.818,4690.815,26245.883,1.1282342029482
-c8,24956.204
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,2951.3387,1604.8171,1256.3962,5812.552,1.04307565690465
-c2,3258.0582,1506.7823,1200.0563,5964.8968,1.01643527149648
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1506.7823,1256.3962,6021.2367,1.00692462385801
-c5,2951.3387,1604.8171,1200.0563,5756.2121,1.05328493622942
-c6,3258.0582,1604.8171,1256.3962,6119.2715,0.990793021836128
-c7,2951.3387,1506.7823,1200.0563,5658.1773,1.07153437783693
-c8,2951.3387,1506.7823,1256.3962,5714.5172,1.06097003153705
-c9,2951.3387,1604.8171,1256.3962,5812.552,1.04307565690465
-c7,5658.1773
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13723.18,8216.485,4897.321,26836.986,1.10338407187982
-c2,16307.435,6976.087,5407.628,28691.15,1.032077936813
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,6976.087,4690.815,27974.337,1.05852384970366
-c5,13723.18,8216.485,5258.25,27197.915,1.08874165137754
-c6,16307.435,8045.818,4897.321,29250.574,1.0123392073867
-c7,13723.18,6756.139,5407.628,25886.947,1.14387775760549
-c8,13723.18,6756.139,4690.815,25170.134,1.17645392282594
-c9,13723.18,8216.485,4897.321,26836.986,1.10338407187982
-c8,25170.134
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,213.93,170.667,206.506,591.103,0
-c2,0,219.948,149.378,369.326,0
-c3,0,0,0,0,0
-c4,0,219.948,0,219.948,0
-c5,213.93,170.667,0,384.597,0
-c6,0,0,206.506,206.506,0
-c7,213.93,0,149.378,363.308,0
-c8,213.93,0,0,213.93,0
-c9,213.93,170.667,206.506,591.103,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,135.408,36.459,49.5704,221.4374,0
-c2,0,45.9656,36.6344,82.6,0
-c3,0,0,0,0,0
-c4,0,45.9656,0,45.9656,0
-c5,135.408,36.459,0,171.867,0
-c6,0,0,49.5704,49.5704,0
-c7,135.408,0,36.6344,172.0424,0
-c8,135.408,0,0,135.408,0
-c9,135.408,36.459,49.5704,221.4374,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,3086.7467,1641.2761,1305.9666,6033.9894,1.00479651149542
-c2,3258.0582,1552.7479,1236.6907,6047.4968,1.00255224603753
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1552.7479,1256.3962,6067.2023,0.999296084139207
-c5,3086.7467,1641.2761,1200.0563,5928.0791,1.02274807664513
-c6,3258.0582,1604.8171,1305.9666,6168.8419,0.982831396881295
-c7,3086.7467,1506.7823,1236.6907,5830.2197,1.0399147558725
-c8,3086.7467,1506.7823,1256.3962,5849.9252,1.03641179828399
-c9,3086.7467,1641.2761,1305.9666,6033.9894,1.00479651149542
-c7,5830.2197
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results1.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results1.csv
deleted file mode 100644
index cdda603e1802f9a53b8b54138515783f87671e43..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results1.csv
+++ /dev/null
@@ -1,121 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13509.25,6756.139,4690.815,24956.204,1.18653874128237
-c2,13509.25,6756.139,5258.25,25523.639,1.16015991622449
-c3,16307.435,6756.139,4690.815,27754.389,1.06691244016609
-c4,13509.25,8045.818,5258.25,26813.318,1.10435802423125
-c5,13509.25,6756.139,4690.815,24956.204,1.18653874128237
-c6,16307.435,8045.818,4690.815,29044.068,1.0195370324173
-c1,24956.204
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,2951.3387,1506.7823,1256.3962,5714.5172,1.06097003153705
-c2,2951.3387,1506.7823,1200.0563,5658.1773,1.07153437783693
-c3,3258.0582,1506.7823,1256.3962,6021.2367,1.00692462385801
-c4,2951.3387,1604.8171,1200.0563,5756.2121,1.05328493622942
-c5,2951.3387,1506.7823,1256.3962,5714.5172,1.06097003153705
-c6,3258.0582,1604.8171,1256.3962,6119.2715,0.990793021836128
-c2,5658.1773
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13723.18,6756.139,4690.815,25170.134,1.17645392282594
-c2,13723.18,6756.139,5407.628,25886.947,1.14387775760549
-c3,16307.435,6976.087,4690.815,27974.337,1.05852384970366
-c4,13723.18,8216.485,5258.25,27197.915,1.08874165137754
-c5,13723.18,6756.139,4690.815,25170.134,1.17645392282594
-c6,16307.435,8045.818,4897.321,29250.574,1.0123392073867
-c1,25170.134
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,213.93,0,0,213.93,0
-c2,213.93,0,149.378,363.308,0
-c3,0,219.948,0,219.948,0
-c4,213.93,170.667,0,384.597,0
-c5,213.93,0,0,213.93,0
-c6,0,0,206.506,206.506,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,135.408,0,0,135.408,0
-c2,135.408,0,36.6344,172.0424,0
-c3,0,45.9656,0,45.9656,0
-c4,135.408,36.459,0,171.867,0
-c5,135.408,0,0,135.408,0
-c6,0,0,49.5704,49.5704,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,3086.7467,1506.7823,1256.3962,5849.9252,1.03641179828399
-c2,3086.7467,1506.7823,1236.6907,5830.2197,1.0399147558725
-c3,3258.0582,1552.7479,1256.3962,6067.2023,0.999296084139207
-c4,3086.7467,1641.2761,1200.0563,5928.0791,1.02274807664513
-c5,3086.7467,1506.7823,1256.3962,5849.9252,1.03641179828399
-c6,3258.0582,1604.8171,1305.9666,6168.8419,0.982831396881295
-c2,5830.2197
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results2.csv b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results2.csv
deleted file mode 100644
index 3772196ed94a052d40cee85ef52b9566646aa63d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_results2.csv
+++ /dev/null
@@ -1,121 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13509.25,6756.139,5258.25,25523.639,1.16015991622449
-c2,13509.25,6756.139,4690.815,24956.204,1.18653874128237
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,8045.818,4690.815,29044.068,1.0195370324173
-c5,13509.25,8045.818,4690.815,26245.883,1.1282342029482
-c6,16307.435,6756.139,4690.815,27754.389,1.06691244016609
-c2,24956.204
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,2951.3387,1506.7823,1200.0563,5658.1773,1.07153437783693
-c2,2951.3387,1506.7823,1256.3962,5714.5172,1.06097003153705
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1604.8171,1256.3962,6119.2715,0.990793021836128
-c5,2951.3387,1604.8171,1256.3962,5812.552,1.04307565690465
-c6,3258.0582,1506.7823,1256.3962,6021.2367,1.00692462385801
-c1,5658.1773
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c1,13723.18,6756.139,5407.628,25886.947,1.14387775760549
-c2,13723.18,6756.139,4690.815,25170.134,1.17645392282594
-c3,16307.435,8045.818,5258.25,29611.503,0.999999996622934
-c4,16307.435,8045.818,4897.321,29250.574,1.0123392073867
-c5,13723.18,8216.485,4897.321,26836.986,1.10338407187982
-c6,16307.435,6976.087,4690.815,27974.337,1.05852384970366
-c2,25170.134
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,213.93,0,149.378,363.308,0
-c2,213.93,0,0,213.93,0
-c3,0,0,0,0,0
-c4,0,0,206.506,206.506,0
-c5,213.93,170.667,206.506,591.103,0
-c6,0,219.948,0,219.948,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,135.408,0,36.6344,172.0424,0
-c2,135.408,0,0,135.408,0
-c3,0,0,0,0,0
-c4,0,0,49.5704,49.5704,0
-c5,135.408,36.459,49.5704,221.4374,0
-c6,0,45.9656,0,45.9656,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c1,3086.7467,1506.7823,1236.6907,5830.2197,1.0399147558725
-c2,3086.7467,1506.7823,1256.3962,5849.9252,1.03641179828399
-c3,3258.0582,1604.8171,1200.0563,6062.9316,0.999999983506329
-c4,3258.0582,1604.8171,1305.9666,6168.8419,0.982831396881295
-c5,3086.7467,1641.2761,1305.9666,6033.9894,1.00479651149542
-c6,3258.0582,1552.7479,1256.3962,6067.2023,0.999296084139207
-c1,5830.2197
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_tensors.txt b/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_tensors.txt
deleted file mode 100644
index 1b0a2e35250a19f38f2c4eb020d2d79038f3b038..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEO/pipeline_GEO_tensors.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-#Conv1,2
-Conv1,3222.23,16082.5,2918.68,13346.8,135.408,213.93,36.4759,219.457
-ClipRelu1,35.8282,224.935,32.6587,162.45,146.743,769.608,36.4146,177.696
-#Conv2,3
-Conv2,1532.25,7670.01,1440.02,6478.39,45.9656,219.948,36.459,170.667
-Add1,36.8972,189.335,32.6705,133.276,152.652,636.419,36.4091,148.094
-ClipRelu2,35.6699,186.473,34.0918,144.473,45.6929,185.575,36.3378,153.377
-#Conv3,2
-Conv3,1164.4,5092.98,1223.57,4572.94,49.5704,206.506,36.6344,149.378
-ClipRelu3,35.6563,165.27,32.8262,117.875,163.696,595.184,36.522,132.298
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs1.txt
deleted file mode 100644
index da21e938c3aab5ec3f870ef5bdce0fb0e6457808..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs1.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,7
-8 8,8 8 8,8 8,7
-8 8,8 8 8,9 9,7
-9 9,9 9 9,9 9,7
-8 8,8 8 8,8 8,7
-8 8,8 8 8,8 8,6
-8 8,8 8 8,9 9,6
-9 9,9 9 9,9 9,6
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs2.txt
deleted file mode 100644
index e004da0e84fe4a8f72bf37eccf5c2ce353263551..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_confs2.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,6
-8 8,8 8 8,9 9,6
-9 9,9 9 9,9 9,6
-8 8,8 8 8,8 8,6
-8 8,8 8 8,8 8,5
-9 9,9 9 9,9 9,5
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp16.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp16.csv
deleted file mode 100644
index 8f1731ca860ca20f23d2c879d4a19031ac96772d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp16.csv
+++ /dev/null
@@ -1,27 +0,0 @@
-Add1,32.585,132.874,90.5805,42.2932,4077.85,2779.91,1297.94,0.267716,1.60873,1.24676,0.493805,44.9777,37.3317,11.3155
-Add1_f2h,152.052,635.102,465.63,169.472,4177.23,3062.65,1114.59,3.70789,13.4672,9.54204,4.05265,24.3579,22.8349,4.12229
-Add1_h2f,36.3936,148.401,98.2724,50.1289,4077.66,2700.25,1377.41,0.24462,1.89046,1.57426,0.469717,42.8092,38.0548,9.49759
-ClipRelu1,32.7405,163.037,121.339,41.6976,4979.72,3706.16,1273.55,0.438457,2.75189,2.11249,0.739841,56.149,47.98,12.1784
-ClipRelu1_f2h,146.657,769.821,612.394,157.427,5249.64,4176.19,1073.45,3.76174,16.813,12.8582,4.0054,23.1617,22.2292,4.05231
-ClipRelu1_h2f,36.3842,177.909,128.755,49.154,4889.74,3538.77,1350.97,0.206959,1.67567,1.40286,0.463424,36.4341,32.4012,10.492
-ClipRelu2,34.0553,144.51,89.3398,55.1703,4243.37,2623.36,1620,0.391793,2.26213,1.54527,0.812253,42.5212,33.0461,13.097
-ClipRelu2_f2h,45.6549,185.567,118.399,67.1677,4064.67,2593.46,1471.21,0.523371,2.17308,1.52455,0.833304,31.5794,27.8107,8.31535
-ClipRelu2_h2f,36.3736,153.724,93.2158,60.5083,4226.21,2562.7,1663.51,0.252782,1.79709,1.32467,0.589658,34.7854,29.0642,9.549
-ClipRelu3,32.7854,118.216,75.162,43.0543,3605.75,2292.56,1313.18,0.506606,3.97908,2.57351,1.46189,107.077,70.5545,38.4207
-ClipRelu3_f2h,165.958,602.927,413.07,189.857,3633.17,2489.15,1144.03,3.02065,10.2904,6.9028,3.47812,25.9071,19.545,7.72569
-ClipRelu3_h2f,36.5443,132.298,82.3714,49.9267,3621.12,2254.57,1366.55,0.600315,1.56585,1.17732,0.515657,68.5219,46.0293,24.9645
-ClipRelu4,32.7184,153.056,112.078,40.9786,4678,3425.53,1252.46,0.154794,1.68025,1.41169,0.366783,47.2146,40.8675,9.29592
-ClipRelu4_f2h,165.584,825.553,648.957,176.596,4986.3,3919.79,1066.5,4.67005,19.877,14.903,5.01446,23.5566,23.11,3.43511
-ClipRelu4_h2f,36.6609,169.727,120.701,49.0256,4629.64,3292.37,1337.27,0.289045,1.92183,1.53364,0.497796,37.7378,32.9471,8.48898
-Conv1,2915.93,13352.1,10576.6,2775.47,4579.12,3627.28,951.839,15.8558,18.3107,9.62974,12.2137,20.0909,18.348,2.75611
-Conv1_f2h,135.06,214.749,45.2804,169.468,1590.14,335.188,1254.95,3.27252,4.99991,2.01686,3.41797,16.3964,10.1737,14.2329
-Conv1_h2f,36.5275,220.001,187.373,32.6287,6022.94,5129.68,893.265,0.32198,2.10465,1.77041,0.358425,31.9138,27.186,5.94593
-Conv2,1440.78,6485.81,4949.68,1536.13,4501.67,3435.49,1066.18,5.5314,12.769,13.1432,5.45702,18.9795,18.0565,2.37007
-Conv2_f2h,45.7961,219.712,153.373,66.339,4797.66,3349.09,1448.57,0.526669,2.73821,2.03676,0.853528,32.5454,29.6105,7.99933
-Conv2_h2f,36.4586,170.747,136.394,34.3529,4683.3,3741.06,942.241,0.257013,1.47757,1.17259,0.319922,23.0558,18.3627,5.39354
-Conv3,1209.29,4559.4,3094.84,1464.56,3770.47,2559.36,1211.1,9.26546,17.9525,14.4448,9.49139,24.2389,22.3724,2.90811
-Conv3_f2h,49.3758,206.121,121.426,84.6944,4174.75,2459.42,1715.33,0.792172,3.13741,2.04183,1.32468,38.1041,33.4828,8.37125
-Conv3_h2f,36.5783,149.412,113.105,36.3067,4084.72,3092.15,992.574,0.207318,1.1695,0.884863,0.308639,23.5563,18.2546,6.207
-Conv4,2636.43,13346.5,10809.1,2537.42,5062.38,4099.94,962.445,9.2555,18.7556,14.6325,10.0121,15.2368,15.0861,1.65147
-Conv4_f2h,52.0572,187.688,112.685,75.0037,3605.8,2164.97,1440.83,1.39581,4.67122,2.79345,2.01277,38.9395,32.0697,10.8245
-Conv4_h2f,36.6862,213.45,180.703,32.7466,5818.23,4925.62,892.61,0.176256,1.64194,1.38908,0.264839,31.3851,26.7179,5.1348
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp32.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp32.csv
deleted file mode 100644
index 32c71387d272af1306da6391e268ab0d1e7690c9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_fp32.csv
+++ /dev/null
@@ -1,9 +0,0 @@
-Add1,36.9761,189.903,150.681,39.2229,5135.79,4075.05,1060.74,0.327484,2.16644,1.67428,0.505853,27.874,21.2818,7.37459
-ClipRelu1,35.896,225.112,189.944,35.1678,6271.16,5291.45,979.702,0.3556,2.8365,2.39001,0.458028,39.0597,32.8052,6.86919
-ClipRelu2,35.8218,187.167,141.441,45.7264,5226.02,3949.26,1276.76,0.560645,3.09466,2.31204,0.794514,103.938,77.8175,26.3982
-ClipRelu3,35.7282,165.939,126.097,39.8416,4644.45,3529.32,1115.12,0.435428,2.30666,1.73936,0.576096,26.442,19.9043,7.12496
-ClipRelu4,35.8348,218.266,183.176,35.0905,6090.92,5111.69,979.23,0.294607,2.10359,1.7505,0.370725,32.8505,27.2303,6.44718
-Conv1,3230.39,16104.1,13023.8,3080.3,4985.31,4031.77,953.548,17.2527,24.4108,13.9864,14.0096,20.57,18.9428,2.87602
-Conv2,1531.88,7675.32,6141.51,1533.8,5010.48,4009.22,1001.26,7.36237,11.0004,7.1979,6.76237,19.5752,18.4653,2.44314
-Conv3,1163.22,5096.2,3770.21,1325.99,4381.36,3241.41,1139.94,10.0955,15.8531,9.13125,10.5922,27.2638,25.85,3.25685
-Conv4,3022.09,16672.3,13850.5,2821.84,5516.86,4583.12,933.739,7.95461,13.8577,12.9997,6.94323,13.4733,12.9753,1.37836
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_layers.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_layers.txt
deleted file mode 100644
index aa19e9d56f9b6f6d681a08bf5f51a8e6aecd36d8..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_layers.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Conv1,2000,1,240,300,1,1,9,9,1,1
-Conv2,2000,1,240,300,1,1,5,5,1,1
-Conv3,2000,1,240,300,1,1,3,3,1,1
-Conv4,2000,1,240,300,1,1,9,9,1,1
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_ops.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_ops.txt
deleted file mode 100644
index 0807a77c985bd73c6c538a1259b2ffc44eeda73a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_ops.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1
-ClipRelu1
-#Conv2,3
-Conv2
-Add1
-ClipRelu2
-#Conv3,2
-Conv3
-ClipRelu3
-#Conv4,2
-Conv4
-ClipRelu4
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs1.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs1.txt
deleted file mode 100644
index 680259f083a6ec52920bb2ce22dbd0f1a50200b3..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs1.txt
+++ /dev/null
@@ -1,46 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,9 9,3
-8 8,8 8 8,8 8,3
-8 8,9 9 9,9 9,3
-8 8,8 8 8,8 8,3
-8 8,9 9 9,8 8,3
-9 9,9 9 9,8 8,3
-9 9,9 9 9,9 9,3
-9 9,8 8 8,8 8,3
-9 9,8 8 8,9 9,3
-8 8,9 9 9,9 9,8 8
-8 8,9 9 9,9 9,6
-8 8,9 9 9,8 8,6
-8 8,9 9 9,9 9,7
-8 8,9 9 9,9 9,4
-8 8,8 8 8,9 9,6
-8 8,9 9 9,8 8,5
-8 8,8 8 8,8 8,8 8
-8 8,9 9 9,8 8,8 8
-9 9,9 9 9,8 8,6
-8 8,8 8 8,8 8,5
-8 8,8 8 8,8 8,4
-9 9,8 8 8,8 8,5
-8 8,8 8 8,8 8,6
-8 8,8 8 8,9 9,5
-8 8,9 9 9,9 9,9 9
-8 8,9 9 9,8 8,9 9
-8 8,8 8 8,8 8,7
-8 8,8 8 8,9 9,4
-8 8,8 8 8,8 8,9 9
-9 9,8 8 8,8 8,6
-9 9,9 9 9,8 8,5
-8 8,9 9 9,8 8,7
-9 9,9 9 9,9 9,9 9
-9 9,8 8 8,8 8,7
-8 8,8 8 8,9 9,8 8
-8 8,9 9 9,9 9,8 8
-8 8,9 9 9,8 8,4
-8 8,8 8 8,9 9,7
-9 9,9 9 9,9 9,6
-8 8,8 8 8,9 9,9 9
-8 8,9 9 9,9 9,5
-9 9,8 8 8,9 9,9 9
-9 9,8 8 8,9 9,7
-9 9,8 8 8,8 8,4
-9 9,9 9 9,8 8,4
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs2.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs2.txt
deleted file mode 100644
index 1e0ff4012e24dd4b11fa809d00f55bf49a2d0656..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_confs2.txt
+++ /dev/null
@@ -1,44 +0,0 @@
-9 9,9 9 9,9 9,9 9
-8 8,8 8 8,8 8,5
-9 9,9 9 9,9 9,9 9
-9 9,8 8 8,8 8,5
-8 8,8 8 8,8 8,4
-8 8,8 8 8,8 8,7
-8 8,8 8 8,8 8,6
-8 8,9 9 9,8 8,9 9
-8 8,9 9 9,8 8,8 8
-9 9,8 8 8,8 8,8 8
-8 8,8 8 8,9 9,4
-8 8,9 9 9,8 8,4
-9 9,9 9 9,8 8,4
-8 8,9 9 9,9 9,4
-8 8,8 8 8,9 9,9 9
-8 8,8 8 8,9 9,2
-9 9,8 8 8,9 9,9 9
-8 8,8 8 8,8 8,9 9
-8 8,8 8 8,8 8,5
-8 8,8 8 8,9 9,5
-8 8,8 8 8,8 8,8 8
-8 8,8 8 8,8 8,3
-9 9,8 8 8,8 8,2
-8 8,9 9 9,9 9,6
-9 9,8 8 8,8 8,4
-8 8,9 9 9,8 8,3
-9 9,8 8 8,9 9,7
-9 9,8 8 8,9 9,4
-9 9,9 9 9,8 8,8 8
-8 8,8 8 8,8 8,2
-8 8,8 8 8,9 9,6
-8 8,8 8 8,9 9,3
-9 9,9 9 9,9 9,5
-9 9,9 9 9,9 9,2
-8 8,8 8 8,9 9,7
-8 8,9 9 9,9 9,3
-8 8,9 9 9,9 9,2
-9 9,9 9 9,9 9,7
-8 8,9 9 9,8 8,2
-8 8,9 9 9,9 9,5
-9 9,8 8 8,9 9,6
-8 8,9 9 9,8 8,5
-8 8,9 9 9,8 8,6
-8 8,9 9 9,8 8,7
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results1.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results1.csv
deleted file mode 100644
index 282361a378d03d391107aeedc3c70df4d54b21fc..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results1.csv
+++ /dev/null
@@ -1,550 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13515.137,6763.194,5262.139,5.046064,25545.516064,1.82162328219378
-c2,13515.137,6763.194,4677.616,5.046064,24960.993064,1.86428106823546
-c3,13515.137,8052.39,5262.139,5.046064,26834.712064,1.73410866923432
-c4,13515.137,6763.194,4677.616,5.046064,24960.993064,1.86428106823546
-c5,13515.137,8052.39,4677.616,5.046064,26250.189064,1.77272273008295
-c6,16329.212,8052.39,4677.616,5.046064,29064.264064,1.60108326628957
-c7,16329.212,8052.39,5262.139,5.046064,29648.787064,1.56951806300201
-c8,16329.212,6763.194,4677.616,5.046064,27775.068064,1.67539848058103
-c9,16329.212,6763.194,5262.139,5.046064,28359.591064,1.64086663770637
-c10,13515.137,8052.39,5262.139,13499.556,40329.222,1.15386076340907
-c11,13515.137,8052.39,5262.139,11.525024,26841.191024,1.73369008793322
-c12,13515.137,8052.39,4677.616,11.525024,26256.668024,1.7722853021654
-c13,13515.137,8052.39,5262.139,15.343114,26845.009114,1.73344351007828
-c14,13515.137,8052.39,5262.139,6.799450,26836.46545,1.73399536959516
-c15,13515.137,6763.194,5262.139,11.525024,25551.995024,1.82116139167122
-c16,13515.137,8052.39,4677.616,7.585986,26252.728986,1.77255122115345
-c17,13515.137,6763.194,4677.616,13499.556,38455.503,1.21008186732057
-c18,13515.137,8052.39,4677.616,13499.556,39744.699,1.17083052718343
-c19,16329.212,8052.39,4677.616,11.525024,29070.743024,1.60072643487337
-c20,13515.137,6763.194,4677.616,7.585986,24963.532986,1.86409138641106
-c21,13515.137,6763.194,4677.616,6.799450,24962.74645,1.86415012093291
-c22,16329.212,6763.194,4677.616,7.585986,27777.607986,1.67524528591263
-c23,13515.137,6763.194,4677.616,11.525024,24967.472024,1.86379729469163
-c24,13515.137,6763.194,5262.139,7.585986,25548.055986,1.82144218109417
-c25,13515.137,8052.39,5262.139,16890.566,43720.232,1.06436550687937
-c26,13515.137,8052.39,4677.616,16890.566,43135.709,1.07878850193748
-c27,13515.137,6763.194,4677.616,15.343114,24971.290114,1.86351232159846
-c28,13515.137,6763.194,5262.139,6.799450,25547.26945,1.8214982587053
-c29,13515.137,6763.194,4677.616,16890.566,41846.513,1.11202352484657
-c30,16329.212,6763.194,4677.616,11.525024,27781.547024,1.67500775936988
-c31,16329.212,8052.39,4677.616,7.585986,29066.803986,1.60094336007216
-c32,13515.137,8052.39,4677.616,15.343114,26260.486114,1.77202762434732
-c33,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c34,16329.212,6763.194,4677.616,15.343114,27785.365114,1.67477759034649
-c35,13515.137,6763.194,5262.139,13499.556,39040.026,1.19196403406093
-c36,13515.137,8052.39,5262.139,13499.556,40329.222,1.15386076340907
-c37,13515.137,8052.39,4677.616,6.799450,26251.94245,1.77260432866519
-c38,13515.137,6763.194,5262.139,15.343114,25555.813114,1.82088930648889
-c39,16329.212,8052.39,5262.139,11.525024,29655.266024,1.56917516118123
-c40,13515.137,6763.194,5262.139,16890.566,42431.036,1.09670447099924
-c41,13515.137,8052.39,5262.139,7.585986,26837.251986,1.73394455031688
-c42,16329.212,6763.194,5262.139,16890.566,45245.111,1.02849359563182
-c43,16329.212,6763.194,5262.139,15.343114,28369.888114,1.64027107364618
-c44,16329.212,6763.194,4677.616,6.799450,27776.82145,1.67529272261174
-c45,16329.212,8052.39,4677.616,6.799450,29066.01745,1.60098668212632
-c2,24960.993064
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c2,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c3,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c4,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c5,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c6,3266.286,1604.6779,1242.0754,7.919790,6120.95909,1.49124289456339
-c7,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c8,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c9,3266.286,1507.4203,1198.9482,7.919790,5980.57429,1.5262475315519
-c10,2948.6705,1604.6779,1198.9482,2669.1484,8421.445,1.08388011696472
-c11,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c12,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c13,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c14,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c15,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c16,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c17,2948.6705,1507.4203,1242.0754,2669.1484,8367.3146,1.09089202776131
-c18,2948.6705,1604.6779,1242.0754,2669.1484,8464.5722,1.07835772162995
-c19,3266.286,1604.6779,1242.0754,7.919790,6120.95909,1.49124289456339
-c20,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c21,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c22,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c23,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c24,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c25,2948.6705,1604.6779,1198.9482,3057.9248,8810.2214,1.03605078487527
-c26,2948.6705,1604.6779,1242.0754,3057.9248,8853.3486,1.03100388443979
-c27,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c28,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c29,2948.6705,1507.4203,1242.0754,3057.9248,8756.091,1.04245567979529
-c30,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c31,3266.286,1604.6779,1242.0754,7.919790,6120.95909,1.49124289456339
-c32,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c33,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c34,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c35,2948.6705,1507.4203,1198.9482,2669.1484,8324.1874,1.09654388491369
-c36,2948.6705,1604.6779,1198.9482,2669.1484,8421.445,1.08388011696472
-c37,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c38,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c39,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c40,2948.6705,1507.4203,1198.9482,3057.9248,8712.9638,1.04761559955505
-c41,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c42,3266.286,1507.4203,1198.9482,3057.9248,9030.5793,1.01076979623257
-c43,3266.286,1507.4203,1198.9482,7.919790,5980.57429,1.5262475315519
-c44,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c45,3266.286,1604.6779,1242.0754,7.919790,6120.95909,1.49124289456339
-c1,5662.95879
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13729.886,6763.194,5411.551,80.161512,25984.792512,1.79082849322069
-c2,13729.886,6763.194,4677.616,80.161512,25250.857512,1.84288025836736
-c3,13729.886,8223.137,5262.139,80.161512,27295.323512,1.70484540361126
-c4,13729.886,6763.194,4677.616,80.161512,25250.857512,1.84288025836736
-c5,13729.886,8223.137,4883.737,80.161512,26916.921512,1.72881236832277
-c6,16329.212,8052.39,4883.737,80.161512,29345.500512,1.58573907514023
-c7,16329.212,8052.39,5262.139,80.161512,29723.902512,1.56555172473259
-c8,16329.212,6982.906,4677.616,80.161512,28069.895512,1.65780121320103
-c9,16329.212,6982.906,5411.551,80.161512,28803.830512,1.61555966728305
-c10,13729.886,8223.137,5262.139,13687.244,40902.406,1.13769118829418
-c11,13729.886,8223.137,5262.139,86.640472,27301.802472,1.70444082866984
-c12,13729.886,8223.137,4883.737,86.640472,26923.400472,1.72839633966577
-c13,13729.886,8223.137,5262.139,90.458562,27305.620562,1.70420250013799
-c14,13729.886,8223.137,5262.139,81.914898,27297.076898,1.7047358954737
-c15,13729.886,6763.194,5411.551,86.640472,25991.271472,1.79038208542789
-c16,13729.886,8223.137,4883.737,82.701434,26919.461434,1.72864925032865
-c17,13729.886,6763.194,4677.616,13499.556,38670.252,1.20336187309211
-c18,13729.886,8223.137,4883.737,13499.556,40336.316,1.1536578324266
-c19,16329.212,8052.39,4883.737,86.640472,29351.979472,1.58538904968409
-c20,13729.886,6763.194,4677.616,82.701434,25253.397434,1.84269490619424
-c21,13729.886,6763.194,4677.616,81.914898,25252.610898,1.84275230009624
-c22,16329.212,6982.906,4677.616,82.701434,28072.435434,1.65765121959724
-c23,13729.886,6763.194,4677.616,86.640472,25257.336472,1.84240752651597
-c24,13729.886,6763.194,5411.551,82.701434,25987.332434,1.79065346314855
-c25,13729.886,8223.137,5262.139,16890.566,44105.728,1.05506266429825
-c26,13729.886,8223.137,4883.737,17104.016,43940.776,1.05902332935808
-c27,13729.886,6763.194,4677.616,90.458562,25261.154562,1.84212905635707
-c28,13729.886,6763.194,5411.551,81.914898,25986.545898,1.79070766094045
-c29,13729.886,6763.194,4677.616,17104.016,42274.712,1.10075987956876
-c30,16329.212,6982.906,4677.616,86.640472,28076.374472,1.65741865569808
-c31,16329.212,8052.39,4883.737,82.701434,29348.040434,1.58560183757718
-c32,13729.886,8223.137,4883.737,90.458562,26927.218562,1.72815126523519
-c33,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c34,16329.212,6982.906,4677.616,90.458562,28080.192562,1.6571932949368
-c35,13729.886,6763.194,5411.551,13687.244,39591.875,1.17534991415448
-c36,13729.886,8223.137,5262.139,13687.244,40902.406,1.13769118829418
-c37,13729.886,8223.137,4883.737,81.914898,26918.674898,1.72869975968198
-c38,13729.886,6763.194,5411.551,90.458562,25995.089562,1.79011911884361
-c39,16329.212,8052.39,5262.139,86.640472,29730.381472,1.56521055363198
-c40,13729.886,6763.194,5411.551,16890.566,42795.197,1.08737218551098
-c41,13729.886,8223.137,5262.139,82.701434,27297.863434,1.70468677675235
-c42,16329.212,6982.906,5411.551,16890.566,45614.235,1.02017071859219
-c43,16329.212,6982.906,5411.551,90.458562,28814.127562,1.6149823290111
-c44,16329.212,6982.906,4677.616,81.914898,28071.648898,1.65769766511812
-c45,16329.212,8052.39,4883.737,81.914898,29347.253898,1.5856443333053
-c2,25250.857512
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,28.951489,28.951489,0
-c2,0,0,0,28.951489,28.951489,0
-c3,0,0,0,28.951489,28.951489,0
-c4,0,0,0,28.951489,28.951489,0
-c5,0,0,0,28.951489,28.951489,0
-c6,0,0,0,28.951489,28.951489,0
-c7,0,0,0,28.951489,28.951489,0
-c8,0,0,0,28.951489,28.951489,0
-c9,0,0,0,28.951489,28.951489,0
-c10,0,0,0,0,0,0
-c11,0,0,0,28.951489,28.951489,0
-c12,0,0,0,28.951489,28.951489,0
-c13,0,0,0,28.951489,28.951489,0
-c14,0,0,0,28.951489,28.951489,0
-c15,0,0,0,28.951489,28.951489,0
-c16,0,0,0,28.951489,28.951489,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,28.951489,28.951489,0
-c20,0,0,0,28.951489,28.951489,0
-c21,0,0,0,28.951489,28.951489,0
-c22,0,0,0,28.951489,28.951489,0
-c23,0,0,0,28.951489,28.951489,0
-c24,0,0,0,28.951489,28.951489,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,28.951489,28.951489,0
-c28,0,0,0,28.951489,28.951489,0
-c29,0,0,0,0,0,0
-c30,0,0,0,28.951489,28.951489,0
-c31,0,0,0,28.951489,28.951489,0
-c32,0,0,0,28.951489,28.951489,0
-c33,0,0,0,0,0,0
-c34,0,0,0,28.951489,28.951489,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,28.951489,28.951489,0
-c38,0,0,0,28.951489,28.951489,0
-c39,0,0,0,28.951489,28.951489,0
-c40,0,0,0,0,0,0
-c41,0,0,0,28.951489,28.951489,0
-c42,0,0,0,0,0,0
-c43,0,0,0,28.951489,28.951489,0
-c44,0,0,0,28.951489,28.951489,0
-c45,0,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,46.163959,46.163959,0
-c2,0,0,0,46.163959,46.163959,0
-c3,0,0,0,46.163959,46.163959,0
-c4,0,0,0,46.163959,46.163959,0
-c5,0,0,0,46.163959,46.163959,0
-c6,0,0,0,46.163959,46.163959,0
-c7,0,0,0,46.163959,46.163959,0
-c8,0,0,0,46.163959,46.163959,0
-c9,0,0,0,46.163959,46.163959,0
-c10,0,0,0,0,0,0
-c11,0,0,0,46.163959,46.163959,0
-c12,0,0,0,46.163959,46.163959,0
-c13,0,0,0,46.163959,46.163959,0
-c14,0,0,0,46.163959,46.163959,0
-c15,0,0,0,46.163959,46.163959,0
-c16,0,0,0,46.163959,46.163959,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,46.163959,46.163959,0
-c20,0,0,0,46.163959,46.163959,0
-c21,0,0,0,46.163959,46.163959,0
-c22,0,0,0,46.163959,46.163959,0
-c23,0,0,0,46.163959,46.163959,0
-c24,0,0,0,46.163959,46.163959,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,46.163959,46.163959,0
-c28,0,0,0,46.163959,46.163959,0
-c29,0,0,0,0,0,0
-c30,0,0,0,46.163959,46.163959,0
-c31,0,0,0,46.163959,46.163959,0
-c32,0,0,0,46.163959,46.163959,0
-c33,0,0,0,0,0,0
-c34,0,0,0,46.163959,46.163959,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,46.163959,46.163959,0
-c38,0,0,0,46.163959,46.163959,0
-c39,0,0,0,46.163959,46.163959,0
-c40,0,0,0,0,0,0
-c41,0,0,0,46.163959,46.163959,0
-c42,0,0,0,0,0,0
-c43,0,0,0,46.163959,46.163959,0
-c44,0,0,0,46.163959,46.163959,0
-c45,0,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,10.930367,10.930367,0
-c2,0,0,0,10.930367,10.930367,0
-c3,0,0,0,10.930367,10.930367,0
-c4,0,0,0,10.930367,10.930367,0
-c5,0,0,0,10.930367,10.930367,0
-c6,0,0,0,10.930367,10.930367,0
-c7,0,0,0,10.930367,10.930367,0
-c8,0,0,0,10.930367,10.930367,0
-c9,0,0,0,10.930367,10.930367,0
-c10,0,0,0,0,0,0
-c11,0,0,0,10.930367,10.930367,0
-c12,0,0,0,10.930367,10.930367,0
-c13,0,0,0,10.930367,10.930367,0
-c14,0,0,0,10.930367,10.930367,0
-c15,0,0,0,10.930367,10.930367,0
-c16,0,0,0,10.930367,10.930367,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,10.930367,10.930367,0
-c20,0,0,0,10.930367,10.930367,0
-c21,0,0,0,10.930367,10.930367,0
-c22,0,0,0,10.930367,10.930367,0
-c23,0,0,0,10.930367,10.930367,0
-c24,0,0,0,10.930367,10.930367,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,10.930367,10.930367,0
-c28,0,0,0,10.930367,10.930367,0
-c29,0,0,0,0,0,0
-c30,0,0,0,10.930367,10.930367,0
-c31,0,0,0,10.930367,10.930367,0
-c32,0,0,0,10.930367,10.930367,0
-c33,0,0,0,0,0,0
-c34,0,0,0,10.930367,10.930367,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,10.930367,10.930367,0
-c38,0,0,0,10.930367,10.930367,0
-c39,0,0,0,10.930367,10.930367,0
-c40,0,0,0,0,0,0
-c41,0,0,0,10.930367,10.930367,0
-c42,0,0,0,0,0,0
-c43,0,0,0,10.930367,10.930367,0
-c44,0,0,0,10.930367,10.930367,0
-c45,0,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c44,0,0,0,0,0,0
-c45,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,214.749,0,149.412,0,364.161,0
-c2,214.749,0,0,0,214.749,0
-c3,214.749,170.747,0,0,385.496,0
-c4,214.749,0,0,0,214.749,0
-c5,214.749,170.747,206.121,0,591.617,0
-c6,0,0,206.121,0,206.121,0
-c7,0,0,0,0,0,0
-c8,0,219.712,0,0,219.712,0
-c9,0,219.712,149.412,0,369.124,0
-c10,214.749,170.747,0,187.688,573.184,0
-c11,214.749,170.747,0,0,385.496,0
-c12,214.749,170.747,206.121,0,591.617,0
-c13,214.749,170.747,0,0,385.496,0
-c14,214.749,170.747,0,0,385.496,0
-c15,214.749,0,149.412,0,364.161,0
-c16,214.749,170.747,206.121,0,591.617,0
-c17,214.749,0,0,0,214.749,0
-c18,214.749,170.747,206.121,0,591.617,0
-c19,0,0,206.121,0,206.121,0
-c20,214.749,0,0,0,214.749,0
-c21,214.749,0,0,0,214.749,0
-c22,0,219.712,0,0,219.712,0
-c23,214.749,0,0,0,214.749,0
-c24,214.749,0,149.412,0,364.161,0
-c25,214.749,170.747,0,0,385.496,0
-c26,214.749,170.747,206.121,213.45,805.067,0
-c27,214.749,0,0,0,214.749,0
-c28,214.749,0,149.412,0,364.161,0
-c29,214.749,0,0,213.45,428.199,0
-c30,0,219.712,0,0,219.712,0
-c31,0,0,206.121,0,206.121,0
-c32,214.749,170.747,206.121,0,591.617,0
-c33,0,0,0,0,0,0
-c34,0,219.712,0,0,219.712,0
-c35,214.749,0,149.412,187.688,551.849,0
-c36,214.749,170.747,0,187.688,573.184,0
-c37,214.749,170.747,206.121,0,591.617,0
-c38,214.749,0,149.412,0,364.161,0
-c39,0,0,0,0,0,0
-c40,214.749,0,149.412,0,364.161,0
-c41,214.749,170.747,0,0,385.496,0
-c42,0,219.712,149.412,0,369.124,0
-c43,0,219.712,149.412,0,369.124,0
-c44,0,219.712,0,0,219.712,0
-c45,0,0,206.121,0,206.121,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.06,0,36.5783,0,171.6383,0
-c2,135.06,0,0,0,135.06,0
-c3,135.06,36.4586,0,0,171.5186,0
-c4,135.06,0,0,0,135.06,0
-c5,135.06,36.4586,49.3758,0,220.8944,0
-c6,0,0,49.3758,0,49.3758,0
-c7,0,0,0,0,0,0
-c8,0,45.7961,0,0,45.7961,0
-c9,0,45.7961,36.5783,0,82.3744,0
-c10,135.06,36.4586,0,52.0572,223.5758,0
-c11,135.06,36.4586,0,0,171.5186,0
-c12,135.06,36.4586,49.3758,0,220.8944,0
-c13,135.06,36.4586,0,0,171.5186,0
-c14,135.06,36.4586,0,0,171.5186,0
-c15,135.06,0,36.5783,0,171.6383,0
-c16,135.06,36.4586,49.3758,0,220.8944,0
-c17,135.06,0,0,0,135.06,0
-c18,135.06,36.4586,49.3758,0,220.8944,0
-c19,0,0,49.3758,0,49.3758,0
-c20,135.06,0,0,0,135.06,0
-c21,135.06,0,0,0,135.06,0
-c22,0,45.7961,0,0,45.7961,0
-c23,135.06,0,0,0,135.06,0
-c24,135.06,0,36.5783,0,171.6383,0
-c25,135.06,36.4586,0,0,171.5186,0
-c26,135.06,36.4586,49.3758,36.6862,257.5806,0
-c27,135.06,0,0,0,135.06,0
-c28,135.06,0,36.5783,0,171.6383,0
-c29,135.06,0,0,36.6862,171.7462,0
-c30,0,45.7961,0,0,45.7961,0
-c31,0,0,49.3758,0,49.3758,0
-c32,135.06,36.4586,49.3758,0,220.8944,0
-c33,0,0,0,0,0,0
-c34,0,45.7961,0,0,45.7961,0
-c35,135.06,0,36.5783,52.0572,223.6955,0
-c36,135.06,36.4586,0,52.0572,223.5758,0
-c37,135.06,36.4586,49.3758,0,220.8944,0
-c38,135.06,0,36.5783,0,171.6383,0
-c39,0,0,0,0,0,0
-c40,135.06,0,36.5783,0,171.6383,0
-c41,135.06,36.4586,0,0,171.5186,0
-c42,0,45.7961,36.5783,0,82.3744,0
-c43,0,45.7961,36.5783,0,82.3744,0
-c44,0,45.7961,0,0,45.7961,0
-c45,0,0,49.3758,0,49.3758,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c2,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c3,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c4,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c5,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c6,3266.286,1604.6779,1291.4512,18.850157,6181.265257,1.47669390858025
-c7,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c8,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c9,3266.286,1553.2164,1235.5265,18.850157,6073.879057,1.50280186089649
-c10,3083.7305,1641.1365,1198.9482,2721.2056,8645.0208,1.05584902634533
-c11,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c12,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c13,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c14,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c15,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c16,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c17,3083.7305,1507.4203,1242.0754,2669.1484,8502.3746,1.07356323639794
-c18,3083.7305,1641.1365,1291.4512,2669.1484,8685.4666,1.05093223142517
-c19,3266.286,1604.6779,1291.4512,18.850157,6181.265257,1.47669390858025
-c20,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c21,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c22,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c23,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c24,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c25,3083.7305,1641.1365,1198.9482,3057.9248,8981.74,1.01626597946204
-c26,3083.7305,1641.1365,1291.4512,3094.611,9110.9292,1.00185574922637
-c27,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c28,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c29,3083.7305,1507.4203,1242.0754,3094.611,8927.8372,1.02240179712952
-c30,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c31,3266.286,1604.6779,1291.4512,18.850157,6181.265257,1.47669390858025
-c32,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c33,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c34,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c35,3083.7305,1507.4203,1235.5265,2721.2056,8547.8829,1.06784766473757
-c36,3083.7305,1641.1365,1198.9482,2721.2056,8645.0208,1.05584902634533
-c37,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c38,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c39,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c40,3083.7305,1507.4203,1235.5265,3057.9248,8884.6021,1.02737710642802
-c41,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c42,3266.286,1553.2164,1235.5265,3057.9248,9112.9537,1.00163318067079
-c43,3266.286,1553.2164,1235.5265,18.850157,6073.879057,1.50280186089649
-c44,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c45,3266.286,1604.6779,1291.4512,18.850157,6181.265257,1.47669390858025
-c1,5845.527457
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c44,0,0,0,0,0,0
-c45,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results2.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results2.csv
deleted file mode 100644
index 76a1c5a4797a1520feee8c0c017c483778f867c7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_promise_results2.csv
+++ /dev/null
@@ -1,528 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13515.137,6763.194,4677.616,7.585986,24963.532986,1.86409138641106
-c2,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c3,16329.212,6763.194,4677.616,7.585986,27777.607986,1.67524528591263
-c4,13515.137,6763.194,4677.616,6.799450,24962.74645,1.86415012093291
-c5,13515.137,6763.194,4677.616,15.343114,24971.290114,1.86351232159846
-c6,13515.137,6763.194,4677.616,11.525024,24967.472024,1.86379729469163
-c7,13515.137,8052.39,4677.616,16890.566,43135.709,1.07878850193748
-c8,13515.137,8052.39,4677.616,13499.556,39744.699,1.17083052718343
-c9,16329.212,6763.194,4677.616,13499.556,41269.578,1.127569244523
-c10,13515.137,6763.194,5262.139,6.799450,25547.26945,1.8214982587053
-c11,13515.137,8052.39,4677.616,6.799450,26251.94245,1.77260432866519
-c12,16329.212,8052.39,4677.616,6.799450,29066.01745,1.60098668212632
-c13,13515.137,8052.39,5262.139,6.799450,26836.46545,1.73399536959516
-c14,13515.137,6763.194,5262.139,16890.566,42431.036,1.09670447099924
-c15,13515.137,6763.194,5262.139,4.563278,25545.033278,1.82165770979483
-c16,16329.212,6763.194,5262.139,16890.566,45245.111,1.02849359563182
-c17,13515.137,6763.194,4677.616,16890.566,41846.513,1.11202352484657
-c18,13515.137,6763.194,4677.616,7.585986,24963.532986,1.86409138641106
-c19,13515.137,6763.194,5262.139,7.585986,25548.055986,1.82144218109417
-c20,13515.137,6763.194,4677.616,13499.556,38455.503,1.21008186732057
-c21,13515.137,6763.194,4677.616,5.046064,24960.993064,1.86428106823546
-c22,16329.212,6763.194,4677.616,4.563278,27774.585278,1.67542760284945
-c23,13515.137,8052.39,5262.139,11.525024,26841.191024,1.73369008793322
-c24,16329.212,6763.194,4677.616,6.799450,27776.82145,1.67529272261174
-c25,13515.137,8052.39,4677.616,5.046064,26250.189064,1.77272273008295
-c26,16329.212,6763.194,5262.139,15.343114,28369.888114,1.64027107364618
-c27,16329.212,6763.194,5262.139,6.799450,28361.34445,1.64076519425804
-c28,16329.212,8052.39,4677.616,13499.556,42558.774,1.09341276820283
-c29,13515.137,6763.194,4677.616,4.563278,24960.510278,1.86431712714556
-c30,13515.137,6763.194,5262.139,11.525024,25551.995024,1.82116139167122
-c31,13515.137,6763.194,5262.139,5.046064,25545.516064,1.82162328219378
-c32,16329.212,8052.39,5262.139,7.585986,29651.326986,1.56938361864995
-c33,16329.212,8052.39,5262.139,4.563278,29648.304278,1.56954362066419
-c34,13515.137,6763.194,5262.139,15.343114,25555.813114,1.82088930648889
-c35,13515.137,8052.39,5262.139,5.046064,26834.712064,1.73410866923432
-c36,13515.137,8052.39,5262.139,4.563278,26834.229278,1.73413986831875
-c37,16329.212,8052.39,5262.139,15.343114,29659.084114,1.56897315723708
-c38,13515.137,8052.39,4677.616,4.563278,26249.706278,1.77275533409397
-c39,13515.137,8052.39,5262.139,7.585986,26837.251986,1.73394455031688
-c40,16329.212,6763.194,5262.139,11.525024,28366.070024,1.64049185511349
-c41,13515.137,8052.39,4677.616,7.585986,26252.728986,1.77255122115345
-c42,13515.137,8052.39,4677.616,11.525024,26256.668024,1.7722853021654
-c43,13515.137,8052.39,4677.616,15.343114,26260.486114,1.77202762434732
-c29,24960.510278
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c2,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c3,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c4,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c5,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c6,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c7,2948.6705,1604.6779,1242.0754,3057.9248,8853.3486,1.03100388443979
-c8,2948.6705,1604.6779,1242.0754,2669.1484,8464.5722,1.07835772162995
-c9,3266.286,1507.4203,1242.0754,2669.1484,8684.9301,1.05099715136456
-c10,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c11,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c12,3266.286,1604.6779,1242.0754,7.919790,6120.95909,1.49124289456339
-c13,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c14,2948.6705,1507.4203,1198.9482,3057.9248,8712.9638,1.04761559955505
-c15,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c16,3266.286,1507.4203,1198.9482,3057.9248,9030.5793,1.01076979623257
-c17,2948.6705,1507.4203,1242.0754,3057.9248,8756.091,1.04245567979529
-c18,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c19,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c20,2948.6705,1507.4203,1242.0754,2669.1484,8367.3146,1.09089202776131
-c21,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c22,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c23,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c24,3266.286,1507.4203,1242.0754,7.919790,6023.70149,1.51532023351774
-c25,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c26,3266.286,1507.4203,1198.9482,7.919790,5980.57429,1.5262475315519
-c27,3266.286,1507.4203,1198.9482,7.919790,5980.57429,1.5262475315519
-c28,3266.286,1604.6779,1242.0754,2669.1484,8782.1877,1.03935797182565
-c29,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c30,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c31,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c32,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c33,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c34,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c35,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c36,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c37,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c38,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c39,2948.6705,1604.6779,1198.9482,7.919790,5760.21639,1.584634347658
-c40,3266.286,1507.4203,1198.9482,7.919790,5980.57429,1.5262475315519
-c41,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c42,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c43,2948.6705,1604.6779,1242.0754,7.919790,5803.34359,1.57285823269929
-c10,5662.95879
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13729.886,6763.194,4677.616,82.701434,25253.397434,1.84269490619424
-c2,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c3,16329.212,6982.906,4677.616,82.701434,28072.435434,1.65765121959724
-c4,13729.886,6763.194,4677.616,81.914898,25252.610898,1.84275230009624
-c5,13729.886,6763.194,4677.616,90.458562,25261.154562,1.84212905635707
-c6,13729.886,6763.194,4677.616,86.640472,25257.336472,1.84240752651597
-c7,13729.886,8223.137,4883.737,17104.016,43940.776,1.05902332935808
-c8,13729.886,8223.137,4883.737,13499.556,40336.316,1.1536578324266
-c9,16329.212,6982.906,4677.616,13499.556,41489.29,1.12159805308407
-c10,13729.886,6763.194,5411.551,81.914898,25986.545898,1.79070766094045
-c11,13729.886,8223.137,4883.737,81.914898,26918.674898,1.72869975968198
-c12,16329.212,8052.39,4883.737,81.914898,29347.253898,1.5856443333053
-c13,13729.886,8223.137,5262.139,81.914898,27297.076898,1.7047358954737
-c14,13729.886,6763.194,5411.551,16890.566,42795.197,1.08737218551098
-c15,13729.886,6763.194,5411.551,79.678726,25984.309726,1.79086176664341
-c16,16329.212,6982.906,5411.551,16890.566,45614.235,1.02017071859219
-c17,13729.886,6763.194,4677.616,17104.016,42274.712,1.10075987956876
-c18,13729.886,6763.194,4677.616,82.701434,25253.397434,1.84269490619424
-c19,13729.886,6763.194,5411.551,82.701434,25987.332434,1.79065346314855
-c20,13729.886,6763.194,4677.616,13499.556,38670.252,1.20336187309211
-c21,13729.886,6763.194,4677.616,80.161512,25250.857512,1.84288025836736
-c22,16329.212,6982.906,4677.616,79.678726,28069.412726,1.65782972691529
-c23,13729.886,8223.137,5262.139,86.640472,27301.802472,1.70444082866984
-c24,16329.212,6982.906,4677.616,81.914898,28071.648898,1.65769766511812
-c25,13729.886,8223.137,4883.737,80.161512,26916.921512,1.72881236832277
-c26,16329.212,6982.906,5411.551,90.458562,28814.127562,1.6149823290111
-c27,16329.212,6982.906,5411.551,81.914898,28805.583898,1.61546132872123
-c28,16329.212,8052.39,4883.737,13499.556,42764.895,1.08814266681084
-c29,13729.886,6763.194,4677.616,79.678726,25250.374726,1.84291549415275
-c30,13729.886,6763.194,5411.551,86.640472,25991.271472,1.79038208542789
-c31,13729.886,6763.194,5411.551,80.161512,25984.792512,1.79082849322069
-c32,16329.212,8052.39,5262.139,82.701434,29726.442434,1.56541795900319
-c33,16329.212,8052.39,5262.139,79.678726,29723.419726,1.5655771533831
-c34,13729.886,6763.194,5411.551,90.458562,25995.089562,1.79011911884361
-c35,13729.886,8223.137,5262.139,80.161512,27295.323512,1.70484540361126
-c36,13729.886,8223.137,5262.139,79.678726,27294.840726,1.70487555859543
-c37,16329.212,8052.39,5262.139,90.458562,29734.199562,1.56500956908117
-c38,13729.886,8223.137,4883.737,79.678726,26916.438726,1.72884337712053
-c39,13729.886,8223.137,5262.139,82.701434,27297.863434,1.70468677675235
-c40,16329.212,6982.906,5411.551,86.640472,28810.309472,1.615196354753
-c41,13729.886,8223.137,4883.737,82.701434,26919.461434,1.72864925032865
-c42,13729.886,8223.137,4883.737,86.640472,26923.400472,1.72839633966577
-c43,13729.886,8223.137,4883.737,90.458562,26927.218562,1.72815126523519
-c29,25250.374726
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,28.951489,28.951489,0
-c2,0,0,0,0,0,0
-c3,0,0,0,28.951489,28.951489,0
-c4,0,0,0,28.951489,28.951489,0
-c5,0,0,0,28.951489,28.951489,0
-c6,0,0,0,28.951489,28.951489,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,28.951489,28.951489,0
-c11,0,0,0,28.951489,28.951489,0
-c12,0,0,0,28.951489,28.951489,0
-c13,0,0,0,28.951489,28.951489,0
-c14,0,0,0,0,0,0
-c15,0,0,0,28.951489,28.951489,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,28.951489,28.951489,0
-c19,0,0,0,28.951489,28.951489,0
-c20,0,0,0,0,0,0
-c21,0,0,0,28.951489,28.951489,0
-c22,0,0,0,28.951489,28.951489,0
-c23,0,0,0,28.951489,28.951489,0
-c24,0,0,0,28.951489,28.951489,0
-c25,0,0,0,28.951489,28.951489,0
-c26,0,0,0,28.951489,28.951489,0
-c27,0,0,0,28.951489,28.951489,0
-c28,0,0,0,0,0,0
-c29,0,0,0,28.951489,28.951489,0
-c30,0,0,0,28.951489,28.951489,0
-c31,0,0,0,28.951489,28.951489,0
-c32,0,0,0,28.951489,28.951489,0
-c33,0,0,0,28.951489,28.951489,0
-c34,0,0,0,28.951489,28.951489,0
-c35,0,0,0,28.951489,28.951489,0
-c36,0,0,0,28.951489,28.951489,0
-c37,0,0,0,28.951489,28.951489,0
-c38,0,0,0,28.951489,28.951489,0
-c39,0,0,0,28.951489,28.951489,0
-c40,0,0,0,28.951489,28.951489,0
-c41,0,0,0,28.951489,28.951489,0
-c42,0,0,0,28.951489,28.951489,0
-c43,0,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,46.163959,46.163959,0
-c2,0,0,0,0,0,0
-c3,0,0,0,46.163959,46.163959,0
-c4,0,0,0,46.163959,46.163959,0
-c5,0,0,0,46.163959,46.163959,0
-c6,0,0,0,46.163959,46.163959,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,46.163959,46.163959,0
-c11,0,0,0,46.163959,46.163959,0
-c12,0,0,0,46.163959,46.163959,0
-c13,0,0,0,46.163959,46.163959,0
-c14,0,0,0,0,0,0
-c15,0,0,0,46.163959,46.163959,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,46.163959,46.163959,0
-c19,0,0,0,46.163959,46.163959,0
-c20,0,0,0,0,0,0
-c21,0,0,0,46.163959,46.163959,0
-c22,0,0,0,46.163959,46.163959,0
-c23,0,0,0,46.163959,46.163959,0
-c24,0,0,0,46.163959,46.163959,0
-c25,0,0,0,46.163959,46.163959,0
-c26,0,0,0,46.163959,46.163959,0
-c27,0,0,0,46.163959,46.163959,0
-c28,0,0,0,0,0,0
-c29,0,0,0,46.163959,46.163959,0
-c30,0,0,0,46.163959,46.163959,0
-c31,0,0,0,46.163959,46.163959,0
-c32,0,0,0,46.163959,46.163959,0
-c33,0,0,0,46.163959,46.163959,0
-c34,0,0,0,46.163959,46.163959,0
-c35,0,0,0,46.163959,46.163959,0
-c36,0,0,0,46.163959,46.163959,0
-c37,0,0,0,46.163959,46.163959,0
-c38,0,0,0,46.163959,46.163959,0
-c39,0,0,0,46.163959,46.163959,0
-c40,0,0,0,46.163959,46.163959,0
-c41,0,0,0,46.163959,46.163959,0
-c42,0,0,0,46.163959,46.163959,0
-c43,0,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,10.930367,10.930367,0
-c2,0,0,0,0,0,0
-c3,0,0,0,10.930367,10.930367,0
-c4,0,0,0,10.930367,10.930367,0
-c5,0,0,0,10.930367,10.930367,0
-c6,0,0,0,10.930367,10.930367,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,10.930367,10.930367,0
-c11,0,0,0,10.930367,10.930367,0
-c12,0,0,0,10.930367,10.930367,0
-c13,0,0,0,10.930367,10.930367,0
-c14,0,0,0,0,0,0
-c15,0,0,0,10.930367,10.930367,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,10.930367,10.930367,0
-c19,0,0,0,10.930367,10.930367,0
-c20,0,0,0,0,0,0
-c21,0,0,0,10.930367,10.930367,0
-c22,0,0,0,10.930367,10.930367,0
-c23,0,0,0,10.930367,10.930367,0
-c24,0,0,0,10.930367,10.930367,0
-c25,0,0,0,10.930367,10.930367,0
-c26,0,0,0,10.930367,10.930367,0
-c27,0,0,0,10.930367,10.930367,0
-c28,0,0,0,0,0,0
-c29,0,0,0,10.930367,10.930367,0
-c30,0,0,0,10.930367,10.930367,0
-c31,0,0,0,10.930367,10.930367,0
-c32,0,0,0,10.930367,10.930367,0
-c33,0,0,0,10.930367,10.930367,0
-c34,0,0,0,10.930367,10.930367,0
-c35,0,0,0,10.930367,10.930367,0
-c36,0,0,0,10.930367,10.930367,0
-c37,0,0,0,10.930367,10.930367,0
-c38,0,0,0,10.930367,10.930367,0
-c39,0,0,0,10.930367,10.930367,0
-c40,0,0,0,10.930367,10.930367,0
-c41,0,0,0,10.930367,10.930367,0
-c42,0,0,0,10.930367,10.930367,0
-c43,0,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,214.749,0,0,0,214.749,0
-c2,0,0,0,0,0,0
-c3,0,219.712,0,0,219.712,0
-c4,214.749,0,0,0,214.749,0
-c5,214.749,0,0,0,214.749,0
-c6,214.749,0,0,0,214.749,0
-c7,214.749,170.747,206.121,213.45,805.067,0
-c8,214.749,170.747,206.121,0,591.617,0
-c9,0,219.712,0,0,219.712,0
-c10,214.749,0,149.412,0,364.161,0
-c11,214.749,170.747,206.121,0,591.617,0
-c12,0,0,206.121,0,206.121,0
-c13,214.749,170.747,0,0,385.496,0
-c14,214.749,0,149.412,0,364.161,0
-c15,214.749,0,149.412,0,364.161,0
-c16,0,219.712,149.412,0,369.124,0
-c17,214.749,0,0,213.45,428.199,0
-c18,214.749,0,0,0,214.749,0
-c19,214.749,0,149.412,0,364.161,0
-c20,214.749,0,0,0,214.749,0
-c21,214.749,0,0,0,214.749,0
-c22,0,219.712,0,0,219.712,0
-c23,214.749,170.747,0,0,385.496,0
-c24,0,219.712,0,0,219.712,0
-c25,214.749,170.747,206.121,0,591.617,0
-c26,0,219.712,149.412,0,369.124,0
-c27,0,219.712,149.412,0,369.124,0
-c28,0,0,206.121,0,206.121,0
-c29,214.749,0,0,0,214.749,0
-c30,214.749,0,149.412,0,364.161,0
-c31,214.749,0,149.412,0,364.161,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,214.749,0,149.412,0,364.161,0
-c35,214.749,170.747,0,0,385.496,0
-c36,214.749,170.747,0,0,385.496,0
-c37,0,0,0,0,0,0
-c38,214.749,170.747,206.121,0,591.617,0
-c39,214.749,170.747,0,0,385.496,0
-c40,0,219.712,149.412,0,369.124,0
-c41,214.749,170.747,206.121,0,591.617,0
-c42,214.749,170.747,206.121,0,591.617,0
-c43,214.749,170.747,206.121,0,591.617,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.06,0,0,0,135.06,0
-c2,0,0,0,0,0,0
-c3,0,45.7961,0,0,45.7961,0
-c4,135.06,0,0,0,135.06,0
-c5,135.06,0,0,0,135.06,0
-c6,135.06,0,0,0,135.06,0
-c7,135.06,36.4586,49.3758,36.6862,257.5806,0
-c8,135.06,36.4586,49.3758,0,220.8944,0
-c9,0,45.7961,0,0,45.7961,0
-c10,135.06,0,36.5783,0,171.6383,0
-c11,135.06,36.4586,49.3758,0,220.8944,0
-c12,0,0,49.3758,0,49.3758,0
-c13,135.06,36.4586,0,0,171.5186,0
-c14,135.06,0,36.5783,0,171.6383,0
-c15,135.06,0,36.5783,0,171.6383,0
-c16,0,45.7961,36.5783,0,82.3744,0
-c17,135.06,0,0,36.6862,171.7462,0
-c18,135.06,0,0,0,135.06,0
-c19,135.06,0,36.5783,0,171.6383,0
-c20,135.06,0,0,0,135.06,0
-c21,135.06,0,0,0,135.06,0
-c22,0,45.7961,0,0,45.7961,0
-c23,135.06,36.4586,0,0,171.5186,0
-c24,0,45.7961,0,0,45.7961,0
-c25,135.06,36.4586,49.3758,0,220.8944,0
-c26,0,45.7961,36.5783,0,82.3744,0
-c27,0,45.7961,36.5783,0,82.3744,0
-c28,0,0,49.3758,0,49.3758,0
-c29,135.06,0,0,0,135.06,0
-c30,135.06,0,36.5783,0,171.6383,0
-c31,135.06,0,36.5783,0,171.6383,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,135.06,0,36.5783,0,171.6383,0
-c35,135.06,36.4586,0,0,171.5186,0
-c36,135.06,36.4586,0,0,171.5186,0
-c37,0,0,0,0,0,0
-c38,135.06,36.4586,49.3758,0,220.8944,0
-c39,135.06,36.4586,0,0,171.5186,0
-c40,0,45.7961,36.5783,0,82.3744,0
-c41,135.06,36.4586,49.3758,0,220.8944,0
-c42,135.06,36.4586,49.3758,0,220.8944,0
-c43,135.06,36.4586,49.3758,0,220.8944,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c2,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c3,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c4,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c5,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c6,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c7,3083.7305,1641.1365,1291.4512,3094.611,9110.9292,1.00185574922637
-c8,3083.7305,1641.1365,1291.4512,2669.1484,8685.4666,1.05093223142517
-c9,3266.286,1553.2164,1242.0754,2669.1484,8730.7262,1.04548425713448
-c10,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c11,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c12,3266.286,1604.6779,1291.4512,18.850157,6181.265257,1.47669390858025
-c13,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c14,3083.7305,1507.4203,1235.5265,3057.9248,8884.6021,1.02737710642802
-c15,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c16,3266.286,1553.2164,1235.5265,3057.9248,9112.9537,1.00163318067079
-c17,3083.7305,1507.4203,1242.0754,3094.611,8927.8372,1.02240179712952
-c18,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c19,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c20,3083.7305,1507.4203,1242.0754,2669.1484,8502.3746,1.07356323639794
-c21,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c22,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c23,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c24,3266.286,1553.2164,1242.0754,18.850157,6080.427957,1.50118327434065
-c25,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c26,3266.286,1553.2164,1235.5265,18.850157,6073.879057,1.50280186089649
-c27,3266.286,1553.2164,1235.5265,18.850157,6073.879057,1.50280186089649
-c28,3266.286,1604.6779,1291.4512,2669.1484,8831.5635,1.03354709465037
-c29,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c30,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c31,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c32,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c33,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c34,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c35,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c36,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c37,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c38,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c39,3083.7305,1641.1365,1198.9482,18.850157,5942.665357,1.53598363664374
-c40,3266.286,1553.2164,1235.5265,18.850157,6073.879057,1.50280186089649
-c41,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c42,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c43,3083.7305,1641.1365,1291.4512,18.850157,6035.168357,1.51244111329037
-c10,5845.527457
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results1.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results1.csv
deleted file mode 100644
index 8a52300d986765ae762a169faa4b5ebc99509663..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results1.csv
+++ /dev/null
@@ -1,143 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13515.137,6763.194,4677.616,15.343114,24971.290114,1.86351232159846
-c2,13515.137,6763.194,4677.616,15.343114,24971.290114,1.86351232159846
-c3,13515.137,6763.194,5262.139,15.343114,25555.813114,1.82088930648889
-c4,16329.212,8052.39,5262.139,15.343114,29659.084114,1.56897315723708
-c5,13515.137,6763.194,4677.616,15.343114,24971.290114,1.86351232159846
-c6,13515.137,6763.194,4677.616,11.525024,24967.472024,1.86379729469163
-c7,13515.137,6763.194,5262.139,11.525024,25551.995024,1.82116139167122
-c8,16329.212,8052.39,5262.139,11.525024,29655.266024,1.56917516118123
-c6,24967.472024
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c2,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c3,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c4,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c5,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c6,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c7,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c8,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c3,5662.95879
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13729.886,6763.194,4677.616,90.458562,25261.154562,1.84212905635707
-c2,13729.886,6763.194,4677.616,90.458562,25261.154562,1.84212905635707
-c3,13729.886,6763.194,5411.551,90.458562,25995.089562,1.79011911884361
-c4,16329.212,8052.39,5262.139,90.458562,29734.199562,1.56500956908117
-c5,13729.886,6763.194,4677.616,90.458562,25261.154562,1.84212905635707
-c6,13729.886,6763.194,4677.616,86.640472,25257.336472,1.84240752651597
-c7,13729.886,6763.194,5411.551,86.640472,25991.271472,1.79038208542789
-c8,16329.212,8052.39,5262.139,86.640472,29730.381472,1.56521055363198
-c6,25257.336472
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,28.951489,28.951489,0
-c2,0,0,0,28.951489,28.951489,0
-c3,0,0,0,28.951489,28.951489,0
-c4,0,0,0,28.951489,28.951489,0
-c5,0,0,0,28.951489,28.951489,0
-c6,0,0,0,28.951489,28.951489,0
-c7,0,0,0,28.951489,28.951489,0
-c8,0,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,46.163959,46.163959,0
-c2,0,0,0,46.163959,46.163959,0
-c3,0,0,0,46.163959,46.163959,0
-c4,0,0,0,46.163959,46.163959,0
-c5,0,0,0,46.163959,46.163959,0
-c6,0,0,0,46.163959,46.163959,0
-c7,0,0,0,46.163959,46.163959,0
-c8,0,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,10.930367,10.930367,0
-c2,0,0,0,10.930367,10.930367,0
-c3,0,0,0,10.930367,10.930367,0
-c4,0,0,0,10.930367,10.930367,0
-c5,0,0,0,10.930367,10.930367,0
-c6,0,0,0,10.930367,10.930367,0
-c7,0,0,0,10.930367,10.930367,0
-c8,0,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,214.749,0,0,0,214.749,0
-c2,214.749,0,0,0,214.749,0
-c3,214.749,0,149.412,0,364.161,0
-c4,0,0,0,0,0,0
-c5,214.749,0,0,0,214.749,0
-c6,214.749,0,0,0,214.749,0
-c7,214.749,0,149.412,0,364.161,0
-c8,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.06,0,0,0,135.06,0
-c2,135.06,0,0,0,135.06,0
-c3,135.06,0,36.5783,0,171.6383,0
-c4,0,0,0,0,0,0
-c5,135.06,0,0,0,135.06,0
-c6,135.06,0,0,0,135.06,0
-c7,135.06,0,36.5783,0,171.6383,0
-c8,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c2,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c3,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c4,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c5,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c6,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c7,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c8,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c3,5845.527457
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results2.csv b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results2.csv
deleted file mode 100644
index 070c3ca75d9394b53a9d5d2779218edec113762e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_results2.csv
+++ /dev/null
@@ -1,121 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13515.137,6763.194,4677.616,11.525024,24967.472024,1.86379729469163
-c2,13515.137,6763.194,5262.139,11.525024,25551.995024,1.82116139167122
-c3,16329.212,8052.39,5262.139,11.525024,29655.266024,1.56917516118123
-c4,13515.137,6763.194,4677.616,11.525024,24967.472024,1.86379729469163
-c5,13515.137,6763.194,4677.616,7.585986,24963.532986,1.86409138641106
-c6,16329.212,8052.39,5262.139,7.585986,29651.326986,1.56938361864995
-c5,24963.532986
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c2,2948.6705,1507.4203,1198.9482,7.919790,5662.95879,1.61184940192988
-c3,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c4,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c5,2948.6705,1507.4203,1242.0754,7.919790,5706.08599,1.5996668742865
-c6,3266.286,1604.6779,1198.9482,7.919790,6077.83189,1.50182448527999
-c2,5662.95879
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16329.212,8052.39,5262.139,16890.566,46534.307,0.999999997851048
-c1,13729.886,6763.194,4677.616,86.640472,25257.336472,1.84240752651597
-c2,13729.886,6763.194,5411.551,86.640472,25991.271472,1.79038208542789
-c3,16329.212,8052.39,5262.139,86.640472,29730.381472,1.56521055363198
-c4,13729.886,6763.194,4677.616,86.640472,25257.336472,1.84240752651597
-c5,13729.886,6763.194,4677.616,82.701434,25253.397434,1.84269490619424
-c6,16329.212,8052.39,5262.139,82.701434,29726.442434,1.56541795900319
-c5,25253.397434
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,28.951489,28.951489,0
-c2,0,0,0,28.951489,28.951489,0
-c3,0,0,0,28.951489,28.951489,0
-c4,0,0,0,28.951489,28.951489,0
-c5,0,0,0,28.951489,28.951489,0
-c6,0,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,46.163959,46.163959,0
-c2,0,0,0,46.163959,46.163959,0
-c3,0,0,0,46.163959,46.163959,0
-c4,0,0,0,46.163959,46.163959,0
-c5,0,0,0,46.163959,46.163959,0
-c6,0,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,10.930367,10.930367,0
-c2,0,0,0,10.930367,10.930367,0
-c3,0,0,0,10.930367,10.930367,0
-c4,0,0,0,10.930367,10.930367,0
-c5,0,0,0,10.930367,10.930367,0
-c6,0,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,214.749,0,0,0,214.749,0
-c2,214.749,0,149.412,0,364.161,0
-c3,0,0,0,0,0,0
-c4,214.749,0,0,0,214.749,0
-c5,214.749,0,0,0,214.749,0
-c6,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,135.06,0,0,0,135.06,0
-c2,135.06,0,36.5783,0,171.6383,0
-c3,0,0,0,0,0,0
-c4,135.06,0,0,0,135.06,0
-c5,135.06,0,0,0,135.06,0
-c6,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3266.286,1604.6779,1198.9482,3057.9248,9127.8369,0.999999989044502
-c1,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c2,3083.7305,1507.4203,1235.5265,18.850157,5845.527457,1.5615078042134
-c3,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c4,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c5,3083.7305,1507.4203,1242.0754,18.850157,5852.076357,1.5597603631924
-c6,3266.286,1604.6779,1198.9482,18.850157,6088.762257,1.49912845416049
-c2,5845.527457
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_tensors.txt b/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_tensors.txt
deleted file mode 100644
index 3fa0edb09a67a17457cfd7a9ed1d5557586b15cb..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GEOM/pipeline_GEOM_tensors.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1,3230.39,16104.1,2915.93,13352.1,135.06,214.749,36.5275,220.001
-ClipRelu1,35.896,225.112,32.7405,163.037,146.657,769.821,36.3842,177.909
-#Conv2,3
-Conv2,1531.88,7675.32,1440.78,6485.81,45.7961,219.712,36.4586,170.747
-Add1,36.9761,189.903,32.585,132.874,152.052,635.102,36.3936,148.401
-ClipRelu2,35.8218,187.167,34.0553,144.51,45.6549,185.567,36.3736,153.724
-#Conv3,2
-Conv3,1163.22,5096.2,1209.29,4559.4,49.3758,206.121,36.5783,149.412
-ClipRelu3,35.7282,165.939,32.7854,118.216,165.958,602.927,36.5443,132.298
-#Conv4,2
-Conv4,3022.09,16672.3,2636.43,13346.5,52.0572,187.688,36.6862,213.45
-ClipRelu4,35.8348,218.266,32.7184,153.056,165.584,825.553,36.6609,169.727
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs1.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs1.txt
deleted file mode 100644
index 24c151f89f008f3d34a44852bf82df6fa6943607..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs1.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-9 9,9 9,9 9
-8 8,8 8,7
-8 8,9 9,7
-8 8,9 9,7
-8 8,8 8,7
-9 9,8 8,7
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs2.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs2.txt
deleted file mode 100644
index 66478e91d8eccacfd04be6fe0b4a9051b95b8cd3..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_confs2.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-9 9,9 9,9 9
-7,8 8,6
-7,9 9,7
-7,8 8,7
-7,9 9,7
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp16.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp16.csv
deleted file mode 100644
index 119769d485e8f3635444942e1577dfb1f30ffb63..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp16.csv
+++ /dev/null
@@ -1,18 +0,0 @@
-ClipRelu1,32.7258,163.65,121.949,41.7015,5000.72,3726.44,1274.27,0.1971,1.70103,1.4139,0.398118,49.4173,42.3377,10.335
-ClipRelu1_f2h,146.88,774.188,616.345,157.843,5271.44,4196.76,1074.68,4.2071,19.2749,14.9998,4.3002,23.6111,21.7033,3.0557
-ClipRelu1_h2f,36.6148,179.907,130.314,49.5932,4913.52,3559.06,1354.45,0.222917,1.8488,1.52567,0.447701,41.4702,36.6395,8.40931
-ClipRelu2,32.7748,123.92,80.2193,43.7008,3780.97,2447.6,1333.37,0.445617,2.16331,1.5542,0.692356,42.4077,34.4301,11.1092
-ClipRelu2_f2h,146.27,552.927,385.072,167.855,3780.54,2632.94,1147.6,3.76973,12.337,8.22917,4.16647,18.9891,17.2608,3.32655
-ClipRelu2_h2f,36.484,139.599,88.2673,51.3316,3826.26,2419.31,1406.95,0.289736,1.58681,1.08099,0.562266,24.5146,18.7651,8.30503
-ClipRelu3,32.4827,162.777,121.496,41.2813,5011.23,3740.36,1270.88,0.281157,2.02032,1.60136,0.508272,48.9996,40.6658,11.6879
-ClipRelu3_f2h,146.047,772.606,615.731,156.875,5290.56,4216.4,1074.16,3.56391,16.2321,12.5509,3.7181,21.601,20.3207,3.37736
-ClipRelu3_h2f,36.3734,179.038,130.004,49.0347,4922.22,3574.13,1348.09,0.280951,1.95425,1.54761,0.494932,35.4942,30.786,8.04821
-Conv1,2906.31,13461.6,10671.8,2789.79,4631.98,3672.06,959.919,17.7692,22.4048,12.9946,13.4788,22.4664,20.6187,2.76273
-Conv1_f2h,56.1955,96.0964,23.101,72.9955,1710.16,411.082,1299.08,0.937944,1.71283,1.24906,1.05611,23.7767,21.1557,14.9728
-Conv1_h2f,36.7417,222.237,189.427,32.8092,6050.45,5157.21,893.24,0.714378,1.89711,1.60827,0.301805,104.891,89.2244,15.8479
-Conv2,1133.43,4702.49,3426.56,1275.92,4149.01,3023.27,1125.73,5.59071,13.8692,13.5082,4.35632,21.9851,20.0789,2.81015
-Conv2_f2h,48.4445,232.944,162.751,70.1925,4808.55,3359.62,1448.93,0.734068,3.74504,2.72272,1.13338,37.893,33.4323,7.75873
-Conv2_h2f,36.5806,152.865,116.681,36.1843,4178.89,3189.72,989.176,0.326615,1.46034,1.11218,0.355733,22.9956,17.5344,5.81516
-Conv3,2473.6,13742.6,11357,2385.59,5555.74,4591.32,964.423,7.28207,12.9199,10.5659,5.71242,14.463,13.641,1.27044
-Conv3_f2h,48.5975,185.44,112.819,72.6212,3816.02,2321.67,1494.35,0.678009,2.51404,1.7155,1.06,34.9156,31.6881,7.7767
-Conv3_h2f,36.5346,221.214,188.635,32.5786,6056.57,5164.61,891.963,0.637827,2.10279,1.77584,0.340253,107.972,91.7763,16.4052
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp32.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp32.csv
deleted file mode 100644
index 3b793e99df3c860b148a514488630229c6122385..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_fp32.csv
+++ /dev/null
@@ -1,6 +0,0 @@
-ClipRelu1,35.9208,226.08,190.765,35.3154,6293.88,5310.73,983.149,0.271835,2.25039,1.86576,0.397483,44.304,36.3115,8.44462
-ClipRelu2,35.7686,170.519,130.589,39.9305,4767.29,3650.94,1116.35,0.291639,1.73338,1.30825,0.434816,28.4654,21.5327,7.40692
-ClipRelu3,35.5024,222.945,188.105,34.8406,6279.77,5298.41,981.364,0.35378,2.38839,1.9942,0.407469,36.9255,30.7992,6.8009
-Conv1,3133.21,16042.6,13049.2,2993.34,5120.24,4164.87,955.365,12.2858,15.5634,11.1935,8.89478,18.1658,16.3007,2.63801
-Conv2,1158.82,5542.63,4326.76,1215.87,4783.12,3733.88,1049.24,6.91453,11.4784,7.18899,6.17588,21.3002,20.0687,1.95484
-Conv3,2954.1,17122.9,14368.5,2754.37,5796.34,4863.95,932.392,6.76714,14.0092,11.3429,6.20296,12.2512,11.6083,1.38573
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_layers.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_layers.txt
deleted file mode 100644
index deefedb4efd481a87ecb2d58d8e5d503ff1daa7e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_layers.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-Conv1,2000,1,240,300,1,1,9,9,1,1
-Conv2,2000,1,240,300,1,1,3,3,1,1
-Conv3,2000,1,240,300,1,1,9,9,1,1
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_ops.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_ops.txt
deleted file mode 100644
index 6dbd74c42edb6ae286efef7aaad8239709c29748..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_ops.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-#Conv1,2
-Conv1
-ClipRelu1
-#Conv2,2
-Conv2
-ClipRelu2
-#Conv3,2
-Conv3
-ClipRelu3
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs1.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs1.txt
deleted file mode 100644
index e061e9d839507ea88117b1ebacf5a705d477171f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs1.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-9 9,9 9,9 9
-8 8,9 9,9 9
-8 8,8 8,8 8
-8 8,8 8,9 9
-8 8,9 9,8 8
-8 8,8 8,7
-9 9,8 8,8 8
-9 9,9 9,8 8
-9 9,8 8,9 9
-8 8,9 9,9 9
-8 8,9 9,7
-9 9,9 9,9 9
-9 9,8 8,7
-9 9,9 9,7
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs2.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs2.txt
deleted file mode 100644
index 4f35063f25b6b4b6f63252258c2545953e6b2a75..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_confs2.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-9 9,9 9,9 9
-7,9 9,3
-5,9 9,4
-4,9 9,5
-4,8 8,6
-4,9 9,6
-4,9 9,6
-5,8 8,5
-6,8 8,4
-6,9 9,4
-6,9 9,4
-5,9 9,5
-5,9 9,5
-4,9 9,7
-7,8 8,4
-5,8 8,6
-7,8 8,4
-5,9 9,6
-7,9 9,4
-6,8 8,5
-7,9 9,4
-6,9 9,5
-6,9 9,5
-5,8 8,7
-5,8 8,7
-5,9 9,7
-5,9 9,7
-7,8 8,5
-6,9 9,6
-6,9 9,6
-6,8 8,6
-9 9,8 8,3
-8 8,9 9,4
-7,9 9,9 9
-5,8 8,8 8
-3,9 9,8 8
-7,9 9,7
-7,8 8,7
-9 9,9 9,5
-8 8,8 8,4
-8 8,9 9,2
-8 8,9 9,8 8
-9 9,8 8,4
-8 8,9 9,7
-4,9 9,9 9
-8 8,9 9,6
-8 8,9 9,5
-8 8,8 8,3
-8 8,8 8,8 8
-8 8,8 8,5
-7,8 8,8 8
-9 9,9 9,2
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results1.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results1.csv
deleted file mode 100644
index 408ee686dfca8f052cbf009b197115c6e2755081..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results1.csv
+++ /dev/null
@@ -1,198 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,13625.25,5713.149,17345.845,36684.244,1.07205899875691
-c2,13625.25,4826.41,13905.377,32357.037,1.21542877607913
-c3,13625.25,4826.41,17345.845,35797.505,1.09861494230222
-c4,13625.25,5713.149,13905.377,33243.776,1.18300862939574
-c5,13625.25,4826.41,15.343114,18467.003114,2.12961862540779
-c6,16268.68,4826.41,13905.377,35000.467,1.12363283288868
-c7,16268.68,5713.149,13905.377,35887.206,1.09586892583427
-c8,16268.68,4826.41,17345.845,38440.935,1.02306756840574
-c9,13625.25,5713.149,17345.845,36684.244,1.07205899875691
-c10,13625.25,5713.149,15.343114,19353.742114,2.03204494330566
-c11,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c12,16268.68,4826.41,15.343114,21110.433114,1.86294964207171
-c13,16268.68,5713.149,15.343114,21997.172114,1.78785134822785
-c5,18467.003114
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,2939.0358,1194.5886,2989.6024,7123.2268,1.03230205961851
-c2,2939.0358,1166.2048,2506.0827,6611.3233,1.11223144824529
-c3,2939.0358,1166.2048,2989.6024,7094.843,1.03643191207428
-c4,2939.0358,1194.5886,2506.0827,6639.7071,1.10747681765244
-c5,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c6,3169.1308,1166.2048,2506.0827,6841.4183,1.0748241621942
-c7,3169.1308,1194.5886,2506.0827,6869.8021,1.07038333651004
-c8,3169.1308,1166.2048,2989.6024,7324.938,1.00387494059506
-c9,2939.0358,1194.5886,2989.6024,7123.2268,1.03230205961851
-c10,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c11,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c12,3169.1308,1166.2048,7.919790,4343.25539,1.69304380479814
-c13,3169.1308,1194.5886,7.919790,4371.63919,1.6820513569865
-c5,4113.16039
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,13721.3464,5866.014,17345.845,36933.2054,1.06483240400024
-c2,13721.3464,4826.41,13905.377,32453.1334,1.2118297914129
-c3,13721.3464,4826.41,17567.059,36114.8154,1.08896234012327
-c4,13721.3464,5866.014,14090.817,33678.1774,1.1677494721916
-c5,13721.3464,4826.41,90.458562,18638.214962,2.11005581109439
-c6,16268.68,5059.354,13905.377,35233.411,1.11620398854881
-c7,16268.68,5713.149,14090.817,36072.646,1.09023535148978
-c8,16268.68,5059.354,17567.059,38895.093,1.01112173453057
-c9,13721.3464,5866.014,17345.845,36933.2054,1.06483240400024
-c10,13721.3464,5866.014,90.458562,19677.818962,1.99857890125365
-c11,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c12,16268.68,5059.354,90.458562,21418.492562,1.83615507499152
-c13,16268.68,5713.149,90.458562,22072.287562,1.78176701039046
-c5,18638.214962
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,28.951489,28.951489,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,28.951489,28.951489,0
-c11,0,0,0,0,0
-c12,0,0,28.951489,28.951489,0
-c13,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,46.163959,46.163959,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,46.163959,46.163959,0
-c11,0,0,0,0,0
-c12,0,0,46.163959,46.163959,0
-c13,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,10.930367,10.930367,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,10.930367,10.930367,0
-c11,0,0,0,0,0
-c12,0,0,10.930367,10.930367,0
-c13,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,96.0964,152.865,0,248.9614,0
-c2,96.0964,0,0,96.0964,0
-c3,96.0964,0,221.214,317.3104,0
-c4,96.0964,152.865,185.44,434.4014,0
-c5,96.0964,0,0,96.0964,0
-c6,0,232.944,0,232.944,0
-c7,0,0,185.44,185.44,0
-c8,0,232.944,221.214,454.158,0
-c9,96.0964,152.865,0,248.9614,0
-c10,96.0964,152.865,0,248.9614,0
-c11,0,0,0,0,0
-c12,0,232.944,0,232.944,0
-c13,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,56.1955,36.5806,0,92.7761,0
-c2,56.1955,0,0,56.1955,0
-c3,56.1955,0,36.5346,92.7301,0
-c4,56.1955,36.5806,48.5975,141.3736,0
-c5,56.1955,0,0,56.1955,0
-c6,0,48.4445,0,48.4445,0
-c7,0,0,48.5975,48.5975,0
-c8,0,48.4445,36.5346,84.9791,0
-c9,56.1955,36.5806,0,92.7761,0
-c10,56.1955,36.5806,0,92.7761,0
-c11,0,0,0,0,0
-c12,0,48.4445,0,48.4445,0
-c13,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,2995.2313,1231.1692,2989.6024,7216.0029,1.01902975927255
-c2,2995.2313,1166.2048,2506.0827,6667.5188,1.10285728623881
-c3,2995.2313,1166.2048,3026.137,7187.5731,1.02306043992707
-c4,2995.2313,1231.1692,2554.6802,6781.0807,1.08438787515996
-c5,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c6,3169.1308,1214.6493,2506.0827,6889.8628,1.06726678117209
-c7,3169.1308,1194.5886,2554.6802,6918.3996,1.06286455233282
-c8,3169.1308,1214.6493,3026.137,7409.9171,0.992362208851672
-c9,2995.2313,1231.1692,2989.6024,7216.0029,1.01902975927255
-c10,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c11,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c12,3169.1308,1214.6493,18.850157,4402.630257,1.67021103379904
-c13,3169.1308,1194.5886,18.850157,4382.569557,1.67785622945091
-c5,4180.286257
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c0,0
-
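
The `*_promise_results*.csv` files repeat one table per metric (the component energies: Compute, Leakage, Memory, Patch, Quantization, Unpatch; the combined Energy; and the analogous Time tables), with one row per configuration and a trailing line naming the best, i.e. lowest-total, configuration. Two identities hold throughout the deleted data and pin down the columns: the Energy table is the row-wise sum of the component energy tables (for c1 above, 18467.003114 + 28.951489 + 46.163959 + 96.0964 = 18638.214962), Time likewise sums Compute, Memory, and Quantization Time, and Improvement is the fp32 baseline total (row c0) divided by the row's total. A sketch recomputing the Improvement column and the trailing best-config line:

```python
def summarize(totals):
    """totals: {config: table Total}; c0 is the fp32 baseline row."""
    base = totals["c0"]
    improvement = {cfg: base / t for cfg, t in totals.items()}
    best = min(totals, key=totals.get)  # the trailing 'c5,18467.003114' line
    return improvement, best

imp, best = summarize({"c0": 39327.674, "c1": 36684.244, "c5": 18467.003114})
assert best == "c5"
assert abs(imp["c5"] - 2.12961862540779) < 1e-6  # matches the Compute Energy table
```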
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results2.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results2.csv
deleted file mode 100644
index 4bbd96279b68828be34786e140f3ba91321ce530..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_promise_results2.csv
+++ /dev/null
@@ -1,616 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,15.343114,5713.149,5.046064,5733.538178,6.85923283200238
-c2,7.585986,5713.149,6.799450,5727.534436,6.86642284787787
-c3,6.799450,5713.149,7.585986,5727.534436,6.86642284787787
-c4,6.799450,4826.41,11.525024,4844.734474,8.11761168734772
-c5,6.799450,5713.149,11.525024,5731.473474,6.86170379959602
-c6,6.799450,5713.149,11.525024,5731.473474,6.86170379959602
-c7,7.585986,4826.41,7.585986,4841.581972,8.12289731231473
-c8,11.525024,4826.41,6.799450,4844.734474,8.11761168734772
-c9,11.525024,5713.149,6.799450,5731.473474,6.86170379959602
-c10,11.525024,5713.149,6.799450,5731.473474,6.86170379959602
-c11,7.585986,5713.149,7.585986,5728.320972,6.86548004305021
-c12,7.585986,5713.149,7.585986,5728.320972,6.86548004305021
-c13,6.799450,5713.149,15.343114,5735.291564,6.85713583615231
-c14,15.343114,4826.41,6.799450,4848.552564,8.11121931128105
-c15,7.585986,4826.41,11.525024,4845.52101,8.11629401816805
-c16,15.343114,4826.41,6.799450,4848.552564,8.11121931128105
-c17,7.585986,5713.149,11.525024,5732.26001,6.86076229014667
-c18,15.343114,5713.149,6.799450,5735.291564,6.85713583615231
-c19,11.525024,4826.41,7.585986,4845.52101,8.11629401816805
-c20,15.343114,5713.149,6.799450,5735.291564,6.85713583615231
-c21,11.525024,5713.149,7.585986,5732.26001,6.86076229014667
-c22,11.525024,5713.149,7.585986,5732.26001,6.86076229014667
-c23,7.585986,4826.41,15.343114,4849.3391,8.10990371636614
-c24,7.585986,4826.41,15.343114,4849.3391,8.10990371636614
-c25,7.585986,5713.149,15.343114,5736.0781,6.8561955797604
-c26,7.585986,5713.149,15.343114,5736.0781,6.8561955797604
-c27,15.343114,4826.41,7.585986,4849.3391,8.10990371636614
-c28,11.525024,5713.149,11.525024,5736.199048,6.85605101658859
-c29,11.525024,5713.149,11.525024,5736.199048,6.85605101658859
-c30,11.525024,4826.41,11.525024,4849.460048,8.10970145124698
-c31,16268.68,4826.41,5.046064,21100.136064,1.863858777703
-c32,13625.25,5713.149,6.799450,19345.19845,2.03294238094031
-c33,15.343114,5713.149,17345.845,23074.337114,1.70439019050734
-c34,7.585986,4826.41,13905.377,18739.372986,2.09866540462772
-c35,5.046064,5713.149,13905.377,19623.572064,2.00410372134732
-c36,15.343114,5713.149,15.343114,5743.835228,6.84693619405936
-c37,15.343114,4826.41,15.343114,4857.096228,8.09695162380975
-c38,16268.68,5713.149,7.585986,21989.414986,1.78848204220942
-c39,13625.25,4826.41,6.799450,18458.45945,2.13060433854027
-c40,13625.25,5713.149,4.563278,19342.962278,2.03317740227474
-c41,13625.25,5713.149,13905.377,33243.776,1.18300862939574
-c42,16268.68,4826.41,6.799450,21101.88945,1.86370390702761
-c43,13625.25,5713.149,15.343114,19353.742114,2.03204494330566
-c44,6.799450,5713.149,17345.845,23065.79345,1.7050215035849
-c45,13625.25,5713.149,11.525024,19349.924024,2.03244590252534
-c46,13625.25,5713.149,7.585986,19345.984986,2.03285972904321
-c47,13625.25,4826.41,5.046064,18456.706064,2.13080674582711
-c48,13625.25,4826.41,13905.377,32357.037,1.21542877607913
-c49,13625.25,4826.41,7.585986,18459.245986,2.13051355492937
-c50,15.343114,4826.41,13905.377,18747.130114,2.09779702552185
-c51,16268.68,5713.149,4.563278,21986.392278,1.78872792424791
-c7,4841.581972
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c2,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c3,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c4,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c5,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c6,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c7,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c8,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c9,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c10,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c11,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c12,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c13,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c14,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c15,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c16,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c17,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c18,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c19,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c20,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c21,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c22,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c23,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c24,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c25,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c26,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c27,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c28,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c29,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c30,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c31,3169.1308,1166.2048,7.919790,4343.25539,1.69304380479814
-c32,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c33,7.919790,1194.5886,2989.6024,4192.11079,1.75408570835778
-c34,7.919790,1166.2048,2506.0827,3680.20729,1.99807266839927
-c35,7.919790,1194.5886,2506.0827,3708.59109,1.98278036679476
-c36,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c37,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c38,3169.1308,1194.5886,7.919790,4371.63919,1.6820513569865
-c39,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c40,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c41,2939.0358,1194.5886,2506.0827,6639.7071,1.10747681765244
-c42,3169.1308,1166.2048,7.919790,4343.25539,1.69304380479814
-c43,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c44,7.919790,1194.5886,2989.6024,4192.11079,1.75408570835778
-c45,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c46,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c47,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c48,2939.0358,1166.2048,2506.0827,6611.3233,1.11223144824529
-c49,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c50,7.919790,1166.2048,2506.0827,3680.20729,1.99807266839927
-c51,3169.1308,1194.5886,7.919790,4371.63919,1.6820513569865
-c4,1182.04438
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,90.458562,5713.149,80.161512,5883.769074,6.68409532001807
-c2,82.701434,5713.149,81.914898,5877.765332,6.69092267375804
-c3,81.914898,5713.149,82.701434,5877.765332,6.69092267375804
-c4,81.914898,4826.41,86.640472,4994.96537,7.87346263676974
-c5,81.914898,5713.149,86.640472,5881.70437,6.68644169400099
-c6,81.914898,5713.149,86.640472,5881.70437,6.68644169400099
-c7,82.701434,4826.41,82.701434,4991.812868,7.87843499989081
-c8,86.640472,4826.41,81.914898,4994.96537,7.87346263676974
-c9,86.640472,5713.149,81.914898,5881.70437,6.68644169400099
-c10,86.640472,5713.149,81.914898,5881.70437,6.68644169400099
-c11,82.701434,5713.149,82.701434,5878.551868,6.69002744452731
-c12,82.701434,5713.149,82.701434,5878.551868,6.69002744452731
-c13,81.914898,5713.149,90.458562,5885.52246,6.68210402714011
-c14,90.458562,4826.41,81.914898,4998.78346,7.86744885589725
-c15,82.701434,4826.41,86.640472,4995.751906,7.87222303124067
-c16,90.458562,4826.41,81.914898,4998.78346,7.86744885589725
-c17,82.701434,5713.149,86.640472,5882.490906,6.68554766337708
-c18,90.458562,5713.149,81.914898,5885.52246,6.68210402714011
-c19,86.640472,4826.41,82.701434,4995.751906,7.87222303124067
-c20,90.458562,5713.149,81.914898,5885.52246,6.68210402714011
-c21,86.640472,5713.149,82.701434,5882.490906,6.68554766337708
-c22,86.640472,5713.149,82.701434,5882.490906,6.68554766337708
-c23,82.701434,4826.41,90.458562,4999.569996,7.86621114312706
-c24,82.701434,4826.41,90.458562,4999.569996,7.86621114312706
-c25,82.701434,5713.149,90.458562,5886.308996,6.68121115602387
-c26,82.701434,5713.149,90.458562,5886.308996,6.68121115602387
-c27,90.458562,4826.41,82.701434,4999.569996,7.86621114312706
-c28,86.640472,5713.149,86.640472,5886.429944,6.68107387772092
-c29,86.640472,5713.149,86.640472,5886.429944,6.68107387772092
-c30,86.640472,4826.41,86.640472,4999.690944,7.8660208508676
-c31,16268.68,5059.354,80.161512,21408.195512,1.83703824053044
-c32,13721.3464,5866.014,81.914898,19669.275298,1.99944701592815
-c33,90.458562,5713.149,17345.845,23149.452562,1.69885977755995
-c34,82.701434,4826.41,13905.377,18814.488434,2.090286638881
-c35,80.161512,5713.149,14090.817,19884.127512,1.9778425670667
-c36,90.458562,5713.149,90.458562,5894.066124,6.67241807359781
-c37,90.458562,4826.41,90.458562,5007.327124,7.85402515967069
-c38,16268.68,5713.149,82.701434,22064.530434,1.78239341822382
-c39,13721.3464,4826.41,81.914898,18629.671298,2.11102349364155
-c40,13721.3464,5866.014,79.678726,19667.039126,1.9996743560672
-c41,13721.3464,5866.014,14090.817,33678.1774,1.1677494721916
-c42,16268.68,5059.354,81.914898,21409.948898,1.83688779471982
-c43,13721.3464,5866.014,90.458562,19677.818962,1.99857890125365
-c44,81.914898,5713.149,17345.845,23140.908898,1.69948699955559
-c45,13721.3464,5866.014,86.640472,19674.000872,1.99896676105542
-c46,13721.3464,5866.014,82.701434,19670.061834,1.99936706513473
-c47,13721.3464,4826.41,80.161512,18627.917912,2.11122219749224
-c48,13721.3464,4826.41,13905.377,32453.1334,1.2118297914129
-c49,13721.3464,4826.41,82.701434,18630.457834,2.1109343709812
-c50,90.458562,4826.41,13905.377,18822.245562,2.08942517838868
-c51,16268.68,5713.149,79.678726,22061.507726,1.78263762885923
-c7,4991.812868
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,28.951489,0,28.951489,57.902978,0
-c2,28.951489,0,28.951489,57.902978,0
-c3,28.951489,0,28.951489,57.902978,0
-c4,28.951489,0,28.951489,57.902978,0
-c5,28.951489,0,28.951489,57.902978,0
-c6,28.951489,0,28.951489,57.902978,0
-c7,28.951489,0,28.951489,57.902978,0
-c8,28.951489,0,28.951489,57.902978,0
-c9,28.951489,0,28.951489,57.902978,0
-c10,28.951489,0,28.951489,57.902978,0
-c11,28.951489,0,28.951489,57.902978,0
-c12,28.951489,0,28.951489,57.902978,0
-c13,28.951489,0,28.951489,57.902978,0
-c14,28.951489,0,28.951489,57.902978,0
-c15,28.951489,0,28.951489,57.902978,0
-c16,28.951489,0,28.951489,57.902978,0
-c17,28.951489,0,28.951489,57.902978,0
-c18,28.951489,0,28.951489,57.902978,0
-c19,28.951489,0,28.951489,57.902978,0
-c20,28.951489,0,28.951489,57.902978,0
-c21,28.951489,0,28.951489,57.902978,0
-c22,28.951489,0,28.951489,57.902978,0
-c23,28.951489,0,28.951489,57.902978,0
-c24,28.951489,0,28.951489,57.902978,0
-c25,28.951489,0,28.951489,57.902978,0
-c26,28.951489,0,28.951489,57.902978,0
-c27,28.951489,0,28.951489,57.902978,0
-c28,28.951489,0,28.951489,57.902978,0
-c29,28.951489,0,28.951489,57.902978,0
-c30,28.951489,0,28.951489,57.902978,0
-c31,0,0,28.951489,28.951489,0
-c32,0,0,28.951489,28.951489,0
-c33,28.951489,0,0,28.951489,0
-c34,28.951489,0,0,28.951489,0
-c35,28.951489,0,0,28.951489,0
-c36,28.951489,0,28.951489,57.902978,0
-c37,28.951489,0,28.951489,57.902978,0
-c38,0,0,28.951489,28.951489,0
-c39,0,0,28.951489,28.951489,0
-c40,0,0,28.951489,28.951489,0
-c41,0,0,0,0,0
-c42,0,0,28.951489,28.951489,0
-c43,0,0,28.951489,28.951489,0
-c44,28.951489,0,0,28.951489,0
-c45,0,0,28.951489,28.951489,0
-c46,0,0,28.951489,28.951489,0
-c47,0,0,28.951489,28.951489,0
-c48,0,0,0,0,0
-c49,0,0,28.951489,28.951489,0
-c50,28.951489,0,0,28.951489,0
-c51,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,46.163959,0,46.163959,92.327918,0
-c2,46.163959,0,46.163959,92.327918,0
-c3,46.163959,0,46.163959,92.327918,0
-c4,46.163959,0,46.163959,92.327918,0
-c5,46.163959,0,46.163959,92.327918,0
-c6,46.163959,0,46.163959,92.327918,0
-c7,46.163959,0,46.163959,92.327918,0
-c8,46.163959,0,46.163959,92.327918,0
-c9,46.163959,0,46.163959,92.327918,0
-c10,46.163959,0,46.163959,92.327918,0
-c11,46.163959,0,46.163959,92.327918,0
-c12,46.163959,0,46.163959,92.327918,0
-c13,46.163959,0,46.163959,92.327918,0
-c14,46.163959,0,46.163959,92.327918,0
-c15,46.163959,0,46.163959,92.327918,0
-c16,46.163959,0,46.163959,92.327918,0
-c17,46.163959,0,46.163959,92.327918,0
-c18,46.163959,0,46.163959,92.327918,0
-c19,46.163959,0,46.163959,92.327918,0
-c20,46.163959,0,46.163959,92.327918,0
-c21,46.163959,0,46.163959,92.327918,0
-c22,46.163959,0,46.163959,92.327918,0
-c23,46.163959,0,46.163959,92.327918,0
-c24,46.163959,0,46.163959,92.327918,0
-c25,46.163959,0,46.163959,92.327918,0
-c26,46.163959,0,46.163959,92.327918,0
-c27,46.163959,0,46.163959,92.327918,0
-c28,46.163959,0,46.163959,92.327918,0
-c29,46.163959,0,46.163959,92.327918,0
-c30,46.163959,0,46.163959,92.327918,0
-c31,0,0,46.163959,46.163959,0
-c32,0,0,46.163959,46.163959,0
-c33,46.163959,0,0,46.163959,0
-c34,46.163959,0,0,46.163959,0
-c35,46.163959,0,0,46.163959,0
-c36,46.163959,0,46.163959,92.327918,0
-c37,46.163959,0,46.163959,92.327918,0
-c38,0,0,46.163959,46.163959,0
-c39,0,0,46.163959,46.163959,0
-c40,0,0,46.163959,46.163959,0
-c41,0,0,0,0,0
-c42,0,0,46.163959,46.163959,0
-c43,0,0,46.163959,46.163959,0
-c44,46.163959,0,0,46.163959,0
-c45,0,0,46.163959,46.163959,0
-c46,0,0,46.163959,46.163959,0
-c47,0,0,46.163959,46.163959,0
-c48,0,0,0,0,0
-c49,0,0,46.163959,46.163959,0
-c50,46.163959,0,0,46.163959,0
-c51,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,10.930367,0,10.930367,21.860734,0
-c2,10.930367,0,10.930367,21.860734,0
-c3,10.930367,0,10.930367,21.860734,0
-c4,10.930367,0,10.930367,21.860734,0
-c5,10.930367,0,10.930367,21.860734,0
-c6,10.930367,0,10.930367,21.860734,0
-c7,10.930367,0,10.930367,21.860734,0
-c8,10.930367,0,10.930367,21.860734,0
-c9,10.930367,0,10.930367,21.860734,0
-c10,10.930367,0,10.930367,21.860734,0
-c11,10.930367,0,10.930367,21.860734,0
-c12,10.930367,0,10.930367,21.860734,0
-c13,10.930367,0,10.930367,21.860734,0
-c14,10.930367,0,10.930367,21.860734,0
-c15,10.930367,0,10.930367,21.860734,0
-c16,10.930367,0,10.930367,21.860734,0
-c17,10.930367,0,10.930367,21.860734,0
-c18,10.930367,0,10.930367,21.860734,0
-c19,10.930367,0,10.930367,21.860734,0
-c20,10.930367,0,10.930367,21.860734,0
-c21,10.930367,0,10.930367,21.860734,0
-c22,10.930367,0,10.930367,21.860734,0
-c23,10.930367,0,10.930367,21.860734,0
-c24,10.930367,0,10.930367,21.860734,0
-c25,10.930367,0,10.930367,21.860734,0
-c26,10.930367,0,10.930367,21.860734,0
-c27,10.930367,0,10.930367,21.860734,0
-c28,10.930367,0,10.930367,21.860734,0
-c29,10.930367,0,10.930367,21.860734,0
-c30,10.930367,0,10.930367,21.860734,0
-c31,0,0,10.930367,10.930367,0
-c32,0,0,10.930367,10.930367,0
-c33,10.930367,0,0,10.930367,0
-c34,10.930367,0,0,10.930367,0
-c35,10.930367,0,0,10.930367,0
-c36,10.930367,0,10.930367,21.860734,0
-c37,10.930367,0,10.930367,21.860734,0
-c38,0,0,10.930367,10.930367,0
-c39,0,0,10.930367,10.930367,0
-c40,0,0,10.930367,10.930367,0
-c41,0,0,0,0,0
-c42,0,0,10.930367,10.930367,0
-c43,0,0,10.930367,10.930367,0
-c44,10.930367,0,0,10.930367,0
-c45,0,0,10.930367,10.930367,0
-c46,0,0,10.930367,10.930367,0
-c47,0,0,10.930367,10.930367,0
-c48,0,0,0,0,0
-c49,0,0,10.930367,10.930367,0
-c50,10.930367,0,0,10.930367,0
-c51,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c14,0,0,0,0,0
-c15,0,0,0,0,0
-c16,0,0,0,0,0
-c17,0,0,0,0,0
-c18,0,0,0,0,0
-c19,0,0,0,0,0
-c20,0,0,0,0,0
-c21,0,0,0,0,0
-c22,0,0,0,0,0
-c23,0,0,0,0,0
-c24,0,0,0,0,0
-c25,0,0,0,0,0
-c26,0,0,0,0,0
-c27,0,0,0,0,0
-c28,0,0,0,0,0
-c29,0,0,0,0,0
-c30,0,0,0,0,0
-c31,0,0,0,0,0
-c32,0,0,0,0,0
-c33,0,0,0,0,0
-c34,0,0,0,0,0
-c35,0,0,0,0,0
-c36,0,0,0,0,0
-c37,0,0,0,0,0
-c38,0,0,0,0,0
-c39,0,0,0,0,0
-c40,0,0,0,0,0
-c41,0,0,0,0,0
-c42,0,0,0,0,0
-c43,0,0,0,0,0
-c44,0,0,0,0,0
-c45,0,0,0,0,0
-c46,0,0,0,0,0
-c47,0,0,0,0,0
-c48,0,0,0,0,0
-c49,0,0,0,0,0
-c50,0,0,0,0,0
-c51,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c14,0,0,0,0,0
-c15,0,0,0,0,0
-c16,0,0,0,0,0
-c17,0,0,0,0,0
-c18,0,0,0,0,0
-c19,0,0,0,0,0
-c20,0,0,0,0,0
-c21,0,0,0,0,0
-c22,0,0,0,0,0
-c23,0,0,0,0,0
-c24,0,0,0,0,0
-c25,0,0,0,0,0
-c26,0,0,0,0,0
-c27,0,0,0,0,0
-c28,0,0,0,0,0
-c29,0,0,0,0,0
-c30,0,0,0,0,0
-c31,0,232.944,0,232.944,0
-c32,96.0964,152.865,0,248.9614,0
-c33,0,0,0,0,0
-c34,0,0,0,0,0
-c35,0,0,185.44,185.44,0
-c36,0,0,0,0,0
-c37,0,0,0,0,0
-c38,0,0,0,0,0
-c39,96.0964,0,0,96.0964,0
-c40,96.0964,152.865,0,248.9614,0
-c41,96.0964,152.865,185.44,434.4014,0
-c42,0,232.944,0,232.944,0
-c43,96.0964,152.865,0,248.9614,0
-c44,0,0,0,0,0
-c45,96.0964,152.865,0,248.9614,0
-c46,96.0964,152.865,0,248.9614,0
-c47,96.0964,0,0,96.0964,0
-c48,96.0964,0,0,96.0964,0
-c49,96.0964,0,0,96.0964,0
-c50,0,0,0,0,0
-c51,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c14,0,0,0,0,0
-c15,0,0,0,0,0
-c16,0,0,0,0,0
-c17,0,0,0,0,0
-c18,0,0,0,0,0
-c19,0,0,0,0,0
-c20,0,0,0,0,0
-c21,0,0,0,0,0
-c22,0,0,0,0,0
-c23,0,0,0,0,0
-c24,0,0,0,0,0
-c25,0,0,0,0,0
-c26,0,0,0,0,0
-c27,0,0,0,0,0
-c28,0,0,0,0,0
-c29,0,0,0,0,0
-c30,0,0,0,0,0
-c31,0,48.4445,0,48.4445,0
-c32,56.1955,36.5806,0,92.7761,0
-c33,0,0,0,0,0
-c34,0,0,0,0,0
-c35,0,0,48.5975,48.5975,0
-c36,0,0,0,0,0
-c37,0,0,0,0,0
-c38,0,0,0,0,0
-c39,56.1955,0,0,56.1955,0
-c40,56.1955,36.5806,0,92.7761,0
-c41,56.1955,36.5806,48.5975,141.3736,0
-c42,0,48.4445,0,48.4445,0
-c43,56.1955,36.5806,0,92.7761,0
-c44,0,0,0,0,0
-c45,56.1955,36.5806,0,92.7761,0
-c46,56.1955,36.5806,0,92.7761,0
-c47,56.1955,0,0,56.1955,0
-c48,56.1955,0,0,56.1955,0
-c49,56.1955,0,0,56.1955,0
-c50,0,0,0,0,0
-c51,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c2,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c3,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c4,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c5,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c6,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c7,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c8,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c9,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c10,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c11,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c12,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c13,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c14,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c15,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c16,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c17,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c18,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c19,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c20,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c21,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c22,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c23,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c24,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c25,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c26,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c27,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c28,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c29,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c30,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c31,3169.1308,1214.6493,18.850157,4402.630257,1.67021103379904
-c32,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c33,18.850157,1194.5886,2989.6024,4203.041157,1.74952405897832
-c34,18.850157,1166.2048,2506.0827,3691.137657,1.99215588365807
-c35,18.850157,1194.5886,2554.6802,3768.118957,1.95145686449047
-c36,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c37,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c38,3169.1308,1194.5886,18.850157,4382.569557,1.67785622945091
-c39,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c40,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c41,2995.2313,1231.1692,2554.6802,6781.0807,1.08438787515996
-c42,3169.1308,1214.6493,18.850157,4402.630257,1.67021103379904
-c43,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c44,18.850157,1194.5886,2989.6024,4203.041157,1.74952405897832
-c45,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c46,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c47,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c48,2995.2313,1166.2048,2506.0827,6667.5188,1.10285728623881
-c49,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c50,18.850157,1166.2048,2506.0827,3691.137657,1.99215588365807
-c51,3169.1308,1194.5886,18.850157,4382.569557,1.67785622945091
-c4,1203.905114
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c6,0,0,0,0,0
-c7,0,0,0,0,0
-c8,0,0,0,0,0
-c9,0,0,0,0,0
-c10,0,0,0,0,0
-c11,0,0,0,0,0
-c12,0,0,0,0,0
-c13,0,0,0,0,0
-c14,0,0,0,0,0
-c15,0,0,0,0,0
-c16,0,0,0,0,0
-c17,0,0,0,0,0
-c18,0,0,0,0,0
-c19,0,0,0,0,0
-c20,0,0,0,0,0
-c21,0,0,0,0,0
-c22,0,0,0,0,0
-c23,0,0,0,0,0
-c24,0,0,0,0,0
-c25,0,0,0,0,0
-c26,0,0,0,0,0
-c27,0,0,0,0,0
-c28,0,0,0,0,0
-c29,0,0,0,0,0
-c30,0,0,0,0,0
-c31,0,0,0,0,0
-c32,0,0,0,0,0
-c33,0,0,0,0,0
-c34,0,0,0,0,0
-c35,0,0,0,0,0
-c36,0,0,0,0,0
-c37,0,0,0,0,0
-c38,0,0,0,0,0
-c39,0,0,0,0,0
-c40,0,0,0,0,0
-c41,0,0,0,0,0
-c42,0,0,0,0,0
-c43,0,0,0,0,0
-c44,0,0,0,0,0
-c45,0,0,0,0,0
-c46,0,0,0,0,0
-c47,0,0,0,0,0
-c48,0,0,0,0,0
-c49,0,0,0,0,0
-c50,0,0,0,0,0
-c51,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results1.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results1.csv
deleted file mode 100644
index 8be42209fe3813932175240022e7550b115fb013..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results1.csv
+++ /dev/null
@@ -1,110 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,13625.25,4826.41,15.343114,18467.003114,2.12961862540779
-c2,13625.25,5713.149,15.343114,19353.742114,2.03204494330566
-c3,13625.25,5713.149,15.343114,19353.742114,2.03204494330566
-c4,13625.25,4826.41,15.343114,18467.003114,2.12961862540779
-c5,16268.68,4826.41,15.343114,21110.433114,1.86294964207171
-c1,18467.003114
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c2,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c3,2939.0358,1194.5886,7.919790,4141.54419,1.77550239357696
-c4,2939.0358,1166.2048,7.919790,4113.16039,1.78775465189786
-c5,3169.1308,1166.2048,7.919790,4343.25539,1.69304380479814
-c1,4113.16039
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,13721.3464,4826.41,90.458562,18638.214962,2.11005581109439
-c2,13721.3464,5866.014,90.458562,19677.818962,1.99857890125365
-c3,13721.3464,5866.014,90.458562,19677.818962,1.99857890125365
-c4,13721.3464,4826.41,90.458562,18638.214962,2.11005581109439
-c5,16268.68,5059.354,90.458562,21418.492562,1.83615507499152
-c1,18638.214962
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,28.951489,28.951489,0
-c2,0,0,28.951489,28.951489,0
-c3,0,0,28.951489,28.951489,0
-c4,0,0,28.951489,28.951489,0
-c5,0,0,28.951489,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,46.163959,46.163959,0
-c2,0,0,46.163959,46.163959,0
-c3,0,0,46.163959,46.163959,0
-c4,0,0,46.163959,46.163959,0
-c5,0,0,46.163959,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,10.930367,10.930367,0
-c2,0,0,10.930367,10.930367,0
-c3,0,0,10.930367,10.930367,0
-c4,0,0,10.930367,10.930367,0
-c5,0,0,10.930367,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,96.0964,0,0,96.0964,0
-c2,96.0964,152.865,0,248.9614,0
-c3,96.0964,152.865,0,248.9614,0
-c4,96.0964,0,0,96.0964,0
-c5,0,232.944,0,232.944,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,56.1955,0,0,56.1955,0
-c2,56.1955,36.5806,0,92.7761,0
-c3,56.1955,36.5806,0,92.7761,0
-c4,56.1955,0,0,56.1955,0
-c5,0,48.4445,0,48.4445,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c2,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c3,2995.2313,1231.1692,18.850157,4245.250657,1.73212896502641
-c4,2995.2313,1166.2048,18.850157,4180.286257,1.75904738862845
-c5,3169.1308,1214.6493,18.850157,4402.630257,1.67021103379904
-c1,4180.286257
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c5,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results2.csv b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results2.csv
deleted file mode 100644
index 3962cd8a7becc7eb4f032529b362789713a3e8d4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_results2.csv
+++ /dev/null
@@ -1,99 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,15.343114,4826.41,11.525024,4853.278138,8.10332152236272
-c2,15.343114,5713.149,15.343114,5743.835228,6.84693619405936
-c3,15.343114,4826.41,15.343114,4857.096228,8.09695162380975
-c4,15.343114,5713.149,15.343114,5743.835228,6.84693619405936
-c1,4853.278138
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c2,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c3,7.919790,1166.2048,7.919790,1182.04438,6.2208503355136
-c4,7.919790,1194.5886,7.919790,1210.42818,6.07497521455794
-c1,1182.04438
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,16268.68,5713.149,17345.845,39327.674,0.999999997457261
-c1,90.458562,4826.41,86.640472,5003.509034,7.86001842841844
-c2,90.458562,5713.149,90.458562,5894.066124,6.67241807359781
-c3,90.458562,4826.41,90.458562,5007.327124,7.85402515967069
-c4,90.458562,5713.149,90.458562,5894.066124,6.67241807359781
-c1,5003.509034
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,28.951489,0,28.951489,57.902978,0
-c2,28.951489,0,28.951489,57.902978,0
-c3,28.951489,0,28.951489,57.902978,0
-c4,28.951489,0,28.951489,57.902978,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,46.163959,0,46.163959,92.327918,0
-c2,46.163959,0,46.163959,92.327918,0
-c3,46.163959,0,46.163959,92.327918,0
-c4,46.163959,0,46.163959,92.327918,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,10.930367,0,10.930367,21.860734,0
-c2,10.930367,0,10.930367,21.860734,0
-c3,10.930367,0,10.930367,21.860734,0
-c4,10.930367,0,10.930367,21.860734,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,3169.1308,1194.5886,2989.6024,7353.3218,0.999999986400704
-c1,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c2,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c3,18.850157,1166.2048,18.850157,1203.905114,6.10789098218824
-c4,18.850157,1194.5886,18.850157,1232.288914,5.96720551466346
-c1,1203.905114
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Total,Improvement
-c0,0,0,0,0,0
-c1,0,0,0,0,0
-c2,0,0,0,0,0
-c3,0,0,0,0,0
-c4,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_tensors.txt b/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_tensors.txt
deleted file mode 100644
index 9f9f7622734cbfa5fe8a93b9ba57d47c4a474881..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSM/pipeline_GSM_tensors.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-#Conv1,2
-Conv1,3133.21,16042.6,2906.31,13461.6,56.1955,96.0964,36.7417,222.237
-ClipRelu1,35.9208,226.08,32.7258,163.65,146.88,774.188,36.6148,179.907
-#Conv2,2
-Conv2,1158.82,5542.63,1133.43,4702.49,48.4445,232.944,36.5806,152.865
-ClipRelu2,35.7686,170.519,32.7748,123.92,146.27,552.927,36.484,139.599
-#Conv3,2
-Conv3,2954.1,17122.9,2473.6,13742.6,48.5975,185.44,36.5346,221.214
-ClipRelu3,35.5024,222.945,32.4827,162.777,146.047,772.606,36.3734,179.038
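
The `*_tensors.txt` format interleaves the ops.txt grouping with per-op numbers. The first two values after the op name are the fp32 time and energy from `pipeline_GSM_fp32.csv`, and summing a layer's ops reproduces the per-layer fp32 columns of the result tables (Conv1: 3133.21 + 35.9208 = 3169.1308 ms and 16042.6 + 226.08 = 16268.68 mJ). The remaining three (time, energy) pairs are plausibly the fp16 numbers and two quantization variants; the quantization pairs do appear verbatim in the Quantization Time/Energy tables (e.g. Conv2's 48.4445/232.944 and 36.5806/152.865). An aggregation sketch for the fp32 columns:

```python
def layer_fp32_totals(path):
    """Sum each layer's op rows into (time_ms, energy_mj)."""
    totals, current = {}, None
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line.startswith("#"):
                current = line[1:].split(",")[0]
                totals[current] = (0.0, 0.0)
            elif line:
                name, t, e, *rest = line.split(",")
                t0, e0 = totals[current]
                totals[current] = (t0 + float(t), e0 + float(e))
    return totals

# layer_fp32_totals("pipeline_GSM_tensors.txt")["Conv1"] -> (3169.1308, 16268.68)
```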
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs1.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs1.txt
deleted file mode 100644
index 8e8d9f3412cb2b8697b1255f266a911b0de7fb5b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs1.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-9 9,9 9,9 9,9 9 9
-8 8,8 8,8 8,8 8 8
-8 8,8 8,8 8,8 8 8
-8 8,8 8,8 8,8 8 8
-8 8,9 9,8 8,8 8 8
-8 8,8 8,8 8,8 8 8
-8 8,8 8,8 8,8 8 8
-8 8,8 8,8 8,8 8 8
-9 9,9 9,9 9,9 9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs2.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs2.txt
deleted file mode 100644
index f959aacca24ebf7e053b7942c15670ba9f4a6250..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_confs2.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-9 9,9 9,9 9,9 9 9
-7,8 8,8 8,8 8 8
-7,8 8,8 8,8 8 8
-7,8 8,8 8,8 8 8
-7,8 8,8 8,8 8 8
-7,9 9,8 8,8 8 8
-7,8 8,8 8,8 8 8
-7,8 8,8 8,8 8 8
-7,9 9,9 9,9 9 9
-7,9 9,9 9,8 8 8
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp16.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp16.csv
deleted file mode 100644
index adf98595058033c6e719a7ab8ea4e0e0863b70fa..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp16.csv
+++ /dev/null
@@ -1,27 +0,0 @@
-Add1,32.7422,134.826,92.2852,42.5406,4117.81,2818.53,1299.28,0.480545,2.45071,1.88399,0.67206,44.7915,39.1725,10.2198
-Add1_f2h,146.433,615.386,452.33,163.055,4202.81,3089.28,1113.53,3.30273,12.1106,8.61188,3.58125,21.0664,19.9283,2.66236
-Add1_h2f,36.3565,149.573,99.4412,50.1316,4114.05,2735.15,1378.89,0.1795,1.48908,1.19317,0.397784,33.5047,28.1316,8.81403
-ClipRelu1,32.8093,164.115,122.311,41.8039,5002.18,3728.01,1274.17,0.383646,2.27289,1.77877,0.560485,50.4256,41.0598,12.0637
-ClipRelu1_f2h,146.021,769.892,612.882,157.01,5272.86,4197.58,1075.28,3.20992,14.4719,11.2309,3.26632,21.7023,19.7576,3.00022
-ClipRelu1_h2f,36.6263,179.956,130.385,49.5713,4913.3,3559.88,1353.42,0.306986,2.11226,1.60989,0.59845,41.1508,34.7001,9.69627
-ClipRelu2,32.6475,123.226,79.6998,43.5266,3774.46,2441.22,1333.23,0.223447,1.71043,1.36201,0.456888,46.0505,38.219,11.3045
-ClipRelu2_f2h,145.654,550.465,383.19,167.275,3779.64,2631.15,1148.48,3.23396,10.0407,6.6131,3.49054,19.7919,17.7688,3.39794
-ClipRelu2_h2f,36.5405,139.548,88.1952,51.353,3819.03,2413.65,1405.38,0.321831,1.60076,1.17453,0.528099,32.0447,26.5949,8.35867
-ClipRelu3,32.6239,163.406,121.952,41.4536,5008.81,3738.16,1270.65,0.252253,1.7972,1.46955,0.42257,41.824,36.9792,8.17033
-ClipRelu3_f2h,145.606,770.056,613.619,156.437,5289.04,4214.63,1074.41,3.44964,15.7416,12.2167,3.55151,21.0011,19.3086,3.1549
-ClipRelu3_h2f,36.4944,179.388,130.181,49.207,4916.63,3567.99,1348.65,0.558737,2.22449,1.73305,0.576189,92.4995,69.5845,24.4802
-ClipRelu4,33.7585,143.672,89.0642,54.6075,4255.92,2638.32,1617.6,0.301993,1.86943,1.33236,0.632059,45.4988,34.9092,13.4074
-ClipRelu4_f2h,49.1495,200.587,128.424,72.1624,4081.34,2613.1,1468.24,0.679613,2.60138,1.81128,1.01555,34.1158,30.7982,8.29963
-ClipRelu4_h2f,36.4394,154.389,93.9278,60.4616,4236.9,2577.66,1659.24,0.260819,1.49794,1.07238,0.537429,29.9756,24.2888,9.27329
-Conv1,2912.47,13477.7,10682,2795.64,4627.7,3667.8,959.897,16.874,19.5344,10.4216,12.2431,21.5177,19.3465,3.10583
-Conv1_f2h,56.6917,96.9595,23.2424,73.7171,1710.36,409.948,1300.41,0.996916,1.96889,1.07875,1.27633,23.3814,16.9444,15.6866
-Conv1_h2f,36.8048,222.547,189.687,32.8596,6047.08,5154.23,892.85,0.767615,5.8828,4.95376,0.934317,115.286,97.0952,18.4143
-Conv2,1136.75,4708.62,3430.35,1278.27,4142.27,3017.77,1124.5,5.26944,10.5517,10.949,4.42046,20.5193,19.0805,2.22352
-Conv2_f2h,49.0656,235.394,164.481,70.9135,4797.65,3352.36,1445.29,0.628751,3.06546,2.23285,0.944601,35.6073,30.5252,8.25377
-Conv2_h2f,36.5672,152.421,116.314,36.1071,4168.22,3180.81,987.413,0.17204,1.2502,0.951453,0.307672,25.1303,19.1077,6.35024
-Conv3,2476.66,13751.7,11364.3,2387.45,5552.57,4588.58,963.982,5.39738,10.1281,10.6951,4.05957,13.5026,12.5865,1.43885
-Conv3_f2h,49.3364,187.77,114.095,73.6744,3806.12,2312.79,1493.33,0.690044,2.50089,1.74511,1.02682,36.6426,32.9583,7.7257
-Conv3_h2f,36.496,221.431,188.804,32.6269,6067.25,5173.27,893.979,0.29224,2.18121,1.83368,0.35668,31.4194,26.5476,5.39186
-Conv4,1437.6,6507.38,4976.29,1531.09,4526.62,3461.58,1065.04,4.87199,11.4425,12.0961,4.02899,17.7563,16.7398,1.63006
-Conv4_f2h,49.0482,236.319,165.433,70.8861,4818.2,3372.97,1445.23,0.679402,3.35184,2.39746,1.04878,34.0698,29.9535,6.64119
-Conv4_h2f,36.5705,171.537,137.169,34.3684,4691.32,3751.38,939.935,0.503451,2.74819,2.18383,0.570163,87.6813,69.6122,18.1999
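
The fp16 profile above adds `_f2h` and `_h2f` rows alongside each op; by their names these are the float-to-half and half-to-float conversions at the op's boundaries, so running an op in fp16 next to fp32 neighbors would cost the op row plus the relevant conversion rows. A sketch under that assumption, where `ops` is a `{name: {"time_ms": ..., "energy_mj": ...}}` mapping such as the one built by the earlier `load_op_profile` sketch:

```python
def fp16_cost(ops, name, to_half=True, from_half=True):
    """Time/energy of an fp16 op, optionally charging boundary conversions.

    The _f2h/_h2f rows are assumed (from their names) to be the
    float->half and half->float conversions around the op."""
    rows = [name]
    if to_half:
        rows.append(name + "_f2h")
    if from_half:
        rows.append(name + "_h2f")
    rows = [r for r in rows if r in ops]  # some ops lack conversion rows
    return (sum(ops[r]["time_ms"] for r in rows),
            sum(ops[r]["energy_mj"] for r in rows))
```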
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp32.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp32.csv
deleted file mode 100644
index 07f6d29eb8878988dd7385f39fc7dfc58c2ca13b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_fp32.csv
+++ /dev/null
@@ -1,9 +0,0 @@
-Add1,36.8449,189.882,150.771,39.1102,5153.49,4092.03,1061.46,0.403419,2.58036,2.0136,0.575255,34.4887,26.8533,8.13957
-ClipRelu1,35.9733,226.348,190.976,35.3723,6292.16,5308.86,983.295,0.377922,2.50327,2.09712,0.419746,29.9011,24.7858,5.90174
-ClipRelu2,35.7905,170.526,130.612,39.9134,4764.52,3649.33,1115.19,0.263273,1.68219,1.29396,0.400556,27.1081,20.8148,6.87275
-ClipRelu3,35.7453,224.772,189.644,35.1276,6288.11,5305.4,982.715,0.26882,2.28079,1.92129,0.371073,37.9406,31.6551,6.78118
-ClipRelu4,35.5248,186.169,140.805,45.3642,5240.54,3963.56,1276.97,0.290282,1.78105,1.34507,0.450777,27.7985,20.973,7.55094
-Conv1,3133.72,16043.1,13048.7,2994.38,5119.56,4164.02,955.538,12.8997,20.1128,14.6161,10.0908,16.3762,15.0127,2.44283
-Conv2,1160.86,5545.29,4327.83,1217.46,4776.99,3728.23,1048.76,7.23604,11.931,7.55606,6.20326,22.2041,20.7609,2.04701
-Conv3,2955.11,17131.1,14376.3,2754.85,5797.15,4864.91,932.236,7.81069,12.3346,8.71001,7.15089,12.9919,12.528,1.32979
-Conv4,1532.34,7694.78,6158.49,1536.29,5021.69,4019.1,1002.58,7.70784,12.3714,7.83712,6.94529,19.857,18.8424,2.02751
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_layers.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_layers.txt
deleted file mode 100644
index cb7b918f537fcbb77d6c71844162161ca21ca01a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_layers.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Conv1,2000,1,240,300,1,1,9,9,1,1
-Conv2,2000,1,240,300,1,1,3,3,1,1
-Conv3,2000,1,240,300,1,1,9,9,1,1
-Conv4,2000,1,240,300,1,1,5,5,1,1
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_ops.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_ops.txt
deleted file mode 100644
index d80034c986694bf89a332c0a382f4f3281728537..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_ops.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1
-ClipRelu1
-#Conv2,2
-Conv2
-ClipRelu2
-#Conv3,2
-Conv3
-ClipRelu3
-#Conv4,3
-Conv4
-Add1
-ClipRelu4
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs1.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs1.txt
deleted file mode 100644
index 8e7edaca0182fd1a9b2f6e510e33db9c51b923f5..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs1.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-9 9,9 9,9 9,9 9 9
-8 8,8 8,9 9,9 9 9
-8 8,9 9,8 8,8 8 8
-8 8,9 9,9 9,8 8 8
-8 8,8 8,8 8,8 8 8
-8 8,8 8,9 9,9 9 9
-8 8,9 9,8 8,9 9 9
-8 8,9 9,9 9,9 9 9
-9 9,8 8,8 8,8 8 8
-8 8,8 8,8 8,9 9 9
-9 9,9 9,8 8,8 8 8
-9 9,9 9,8 8,9 9 9
-9 9,9 9,9 9,9 9 9
-8 8,9 9,9 9,9 9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs2.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs2.txt
deleted file mode 100644
index 4c0531b59728b06aa987b947d9ed062aa5f80b72..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_confs2.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-9 9,9 9,9 9,9 9 9
-7,9 9,6,9 9 9
-7,8 8,6,9 9 9
-7,9 9,5,8 8 8
-5,9 9,7,9 9 9
-7,8 8,5,8 8 8
-5,9 9,7,8 8 8
-5,8 8,7,8 8 8
-6,9 9,6,8 8 8
-6,9 9,6,9 9 9
-6,8 8,6,8 8 8
-6,8 8,6,9 9 9
-9 9,8 8,4,8 8 8
-8 8,8 8,4,9 9 9
-9 9,8 8,4,9 9 9
-5,9 9,9 9,8 8 8
-5,9 9,8 8,8 8 8
-5,8 8,8 8,8 8 8
-5,8 8,8 8,9 9 9
-5,9 9,8 8,9 9 9
-5,9 9,9 9,8 8 8
-5,8 8,9 9,9 9 9
-5,8 8,9 9,8 8 8
-5,8 8,8 8,9 9 9
-5,9 9,9 9,9 9 9
-7,9 9,6,9 9 9
-8 8,9 9,6,8 8 8
-7,8 8,6,8 8 8
-7,8 8,8 8,9 9 9
-6,8 8,8 8,8 8 8
-7,9 9,6,8 8 8
-9 9,8 8,6,8 8 8
-9 9,9 9,6,8 8 8
-7,9 9,7,9 9 9
-6,8 8,8 8,9 9 9
-8 8,8 8,8 8,8 8 8
-8 8,8 8,8 8,9 9 9
-7,8 8,8 8,8 8 8
-8 8,8 8,6,8 8 8
-7,8 8,9 9,9 9 9
-7,8 8,9 9,8 8 8
-7,8 8,6,9 9 9
-6,8 8,9 9,9 9 9
-8 8,8 8,5,9 9 9
-8 8,8 8,5,8 8 8
-8 8,9 9,6,8 8 8
-7,8 8,7,8 8 8
-7,9 9,8 8,9 9 9
-6,9 9,8 8,9 9 9
-8 8,9 9,8 8,9 9 9
-7,9 9,7,8 8 8
-6,8 8,7,9 9 9
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results1.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results1.csv
deleted file mode 100644
index 62918ed77910a43edb399dfc9a5e48e8fc956b37..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results1.csv
+++ /dev/null
@@ -1,198 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,13641.815,4831.846,17355.872,8070.831,43900.364,1.07999029101447
-c2,13641.815,5715.816,13915.106,6785.878,40058.615,1.18356480576384
-c3,13641.815,5715.816,17355.872,6785.878,43499.381,1.08994578315966
-c4,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c5,13641.815,4831.846,17355.872,8070.831,43900.364,1.07999029101447
-c6,13641.815,5715.816,13915.106,8070.831,41343.568,1.14677975750235
-c7,13641.815,5715.816,17355.872,8070.831,44784.334,1.05867303718601
-c8,16269.448,4831.846,13915.106,6785.878,41802.278,1.13419577006259
-c9,13641.815,4831.846,13915.106,8070.831,40459.598,1.1718348482557
-c10,16269.448,5715.816,13915.106,6785.878,42686.248,1.11070822830175
-c11,16269.448,5715.816,13915.106,8070.831,43971.201,1.07825044151455
-c12,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c13,13641.815,5715.816,17355.872,8070.831,44784.334,1.05867303718601
-c4,39174.645
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,2945.2793,1169.3975,2990.8553,1604.7097,8710.2418,1.02889321592779
-c2,2945.2793,1196.6505,2509.2839,1504.1007,8155.3144,1.09890413177812
-c3,2945.2793,1196.6505,2990.8553,1504.1007,8636.8858,1.03763195482298
-c4,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c5,2945.2793,1169.3975,2990.8553,1604.7097,8710.2418,1.02889321592779
-c6,2945.2793,1196.6505,2509.2839,1604.7097,8255.9234,1.08551257772677
-c7,2945.2793,1196.6505,2990.8553,1604.7097,8737.4948,1.02568400926906
-c8,3169.6933,1169.3975,2509.2839,1504.1007,8352.4754,1.07296439241276
-c9,2945.2793,1169.3975,2509.2839,1604.7097,8228.6704,1.08910774832945
-c10,3169.6933,1196.6505,2509.2839,1504.1007,8379.7284,1.06947484038415
-c11,3169.6933,1196.6505,2509.2839,1604.7097,8480.3374,1.05678680830804
-c12,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c13,2945.2793,1196.6505,2990.8553,1604.7097,8737.4948,1.02568400926906
-c4,8128.0614
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,13738.7745,4831.846,17577.303,8070.831,44218.7545,1.07221398315908
-c2,13738.7745,5868.237,14102.876,6785.878,40495.7655,1.17078826137812
-c3,13738.7745,5868.237,17355.872,7022.197,43985.0805,1.07791019939611
-c4,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c5,13738.7745,4831.846,17577.303,8070.831,44218.7545,1.07221398315908
-c6,13738.7745,5868.237,14102.876,8242.368,41952.2555,1.1301410692206
-c7,13738.7745,5868.237,17355.872,8070.831,45033.7145,1.05281048701232
-c8,16269.448,5067.24,13915.106,6785.878,42037.672,1.1278447314403
-c9,13738.7745,4831.846,13915.106,8242.368,40728.0945,1.16410962667524
-c10,16269.448,5715.816,14102.876,6785.878,42874.018,1.10584379773819
-c11,16269.448,5715.816,14102.876,8242.368,44330.508,1.06951102146289
-c12,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c13,13738.7745,5868.237,17355.872,8070.831,45033.7145,1.05281048701232
-c4,39271.6045
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,96.9595,0,221.431,0,318.3905,0
-c2,96.9595,152.421,187.77,0,437.1505,0
-c3,96.9595,152.421,0,236.319,485.6995,0
-c4,96.9595,0,0,0,96.9595,0
-c5,96.9595,0,221.431,0,318.3905,0
-c6,96.9595,152.421,187.77,171.537,608.6875,0
-c7,96.9595,152.421,0,0,249.3805,0
-c8,0,235.394,0,0,235.394,0
-c9,96.9595,0,0,171.537,268.4965,0
-c10,0,0,187.77,0,187.77,0
-c11,0,0,187.77,171.537,359.307,0
-c12,0,0,0,0,0,0
-c13,96.9595,152.421,0,0,249.3805,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,56.6917,0,36.496,0,93.1877,0
-c2,56.6917,36.5672,49.3364,0,142.5953,0
-c3,56.6917,36.5672,0,49.0482,142.3071,0
-c4,56.6917,0,0,0,56.6917,0
-c5,56.6917,0,36.496,0,93.1877,0
-c6,56.6917,36.5672,49.3364,36.5705,179.1658,0
-c7,56.6917,36.5672,0,0,93.2589,0
-c8,0,49.0656,0,0,49.0656,0
-c9,56.6917,0,0,36.5705,93.2622,0
-c10,0,0,49.3364,0,49.3364,0
-c11,0,0,49.3364,36.5705,85.9069,0
-c12,0,0,0,0,0,0
-c13,56.6917,36.5672,0,0,93.2589,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,3001.971,1169.3975,3027.3513,1604.7097,8803.4295,1.01800198413582
-c2,3001.971,1233.2177,2558.6203,1504.1007,8297.9097,1.08002003106855
-c3,3001.971,1233.2177,2990.8553,1553.1489,8779.1929,1.02081236851724
-c4,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c5,3001.971,1169.3975,3027.3513,1604.7097,8803.4295,1.01800198413582
-c6,3001.971,1233.2177,2558.6203,1641.2802,8435.0892,1.06245571104979
-c7,3001.971,1233.2177,2990.8553,1604.7097,8830.7537,1.01485207299064
-c8,3169.6933,1218.4631,2509.2839,1504.1007,8401.541,1.06669820373788
-c9,3001.971,1169.3975,2509.2839,1641.2802,8321.9326,1.07690234024603
-c10,3169.6933,1196.6505,2558.6203,1504.1007,8429.0648,1.06321506671517
-c11,3169.6933,1196.6505,2558.6203,1641.2802,8566.2443,1.04618878256614
-c12,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c13,3001.971,1233.2177,2990.8553,1604.7097,8830.7537,1.01485207299064
-c4,8184.7531
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results2.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results2.csv
deleted file mode 100644
index 2923a28122195bfcd79bed384207073372a8eab9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_promise_results2.csv
+++ /dev/null
@@ -1,616 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,15.343114,5715.816,11.525024,8070.831,13813.515138,3.43228831931014
-c2,15.343114,4831.846,11.525024,8070.831,12929.545138,3.66694776399838
-c3,15.343114,5715.816,7.585986,6785.878,12524.6231,3.78550046918777
-c4,7.585986,5715.816,15.343114,8070.831,13809.5761,3.43326734385955
-c5,15.343114,4831.846,7.585986,6785.878,11640.6531,4.07296447934726
-c6,7.585986,5715.816,15.343114,6785.878,12524.6231,3.78550046918777
-c7,7.585986,4831.846,15.343114,6785.878,11640.6531,4.07296447934726
-c8,11.525024,5715.816,11.525024,6785.878,12524.744048,3.78546391365375
-c9,11.525024,5715.816,11.525024,8070.831,13809.697048,3.43323727463976
-c10,11.525024,4831.846,11.525024,6785.878,11640.774048,4.07292216112155
-c11,11.525024,4831.846,11.525024,8070.831,12925.727048,3.66803093219681
-c12,16269.448,4831.846,6.799450,6785.878,27893.97145,1.6997209205227
-c13,13641.815,4831.846,6.799450,8070.831,26551.29145,1.78567460308725
-c14,16269.448,4831.846,6.799450,8070.831,29178.92445,1.62487026959326
-c15,7.585986,5715.816,17355.872,6785.878,29865.151986,1.58753475835221
-c16,7.585986,5715.816,13915.106,6785.878,26424.385986,1.79425046416195
-c17,7.585986,4831.846,13915.106,6785.878,25540.415986,1.85635061074784
-c18,7.585986,4831.846,13915.106,8070.831,26825.368986,1.7674301832717
-c19,7.585986,5715.816,13915.106,8070.831,27709.338986,1.71104647616639
-c20,7.585986,5715.816,17355.872,6785.878,29865.151986,1.58753475835221
-c21,7.585986,4831.846,17355.872,8070.831,30266.134986,1.56650219346741
-c22,7.585986,4831.846,17355.872,6785.878,28981.181986,1.635956975782
-c23,7.585986,4831.846,13915.106,8070.831,26825.368986,1.7674301832717
-c24,7.585986,5715.816,17355.872,8070.831,31150.104986,1.52204838054651
-c25,15.343114,5715.816,11.525024,8070.831,13813.515138,3.43228831931014
-c26,13641.815,5715.816,11.525024,6785.878,26155.034024,1.81272816449872
-c27,15.343114,4831.846,11.525024,6785.878,11644.592138,4.0715867100335
-c28,15.343114,4831.846,13915.106,8070.831,26833.126114,1.76691924086218
-c29,11.525024,4831.846,13915.106,6785.878,25544.355024,1.85606435432987
-c30,15.343114,5715.816,11.525024,6785.878,12528.562138,3.7843102903058
-c31,16269.448,4831.846,11.525024,6785.878,27898.697024,1.69943301614661
-c32,16269.448,5715.816,11.525024,6785.878,28782.667024,1.64724022258751
-c33,15.343114,5715.816,15.343114,8070.831,13817.333228,3.43133988842279
-c34,11.525024,4831.846,13915.106,8070.831,26829.308024,1.76717069187438
-c35,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c36,13641.815,4831.846,13915.106,8070.831,40459.598,1.1718348482557
-c37,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c38,13641.815,4831.846,11.525024,6785.878,25271.064024,1.87613654760872
-c39,15.343114,4831.846,17355.872,8070.831,30273.892114,1.56610080609571
-c40,15.343114,4831.846,17355.872,6785.878,28988.939114,1.63551921131018
-c41,15.343114,4831.846,11.525024,8070.831,12929.545138,3.66694776399838
-c42,11.525024,4831.846,17355.872,8070.831,30270.074024,1.56629834488607
-c43,13641.815,4831.846,7.585986,8070.831,26552.077986,1.78562170713857
-c44,13641.815,4831.846,7.585986,6785.878,25267.124986,1.87642902936631
-c45,13641.815,5715.816,11.525024,6785.878,26155.034024,1.81272816449872
-c46,15.343114,4831.846,15.343114,6785.878,11648.410228,4.07025213440781
-c47,15.343114,5715.816,13915.106,8070.831,27717.096114,1.71056760902868
-c48,11.525024,5715.816,13915.106,8070.831,27713.278024,1.71080327588315
-c49,13641.815,5715.816,13915.106,8070.831,41343.568,1.14677975750235
-c50,15.343114,5715.816,15.343114,6785.878,12532.380228,3.78315736987902
-c51,11.525024,4831.846,15.343114,8070.831,12929.545138,3.66694776399838
-c5,11640.6531
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,7.919790,1196.6505,7.919790,1604.7097,2817.19978,3.18114055861737
-c2,7.919790,1169.3975,7.919790,1604.7097,2789.94678,3.21221485048489
-c3,7.919790,1196.6505,7.919790,1504.1007,2716.59078,3.29895416567106
-c4,7.919790,1196.6505,7.919790,1604.7097,2817.19978,3.18114055861737
-c5,7.919790,1169.3975,7.919790,1504.1007,2689.33778,3.33238484708362
-c6,7.919790,1196.6505,7.919790,1504.1007,2716.59078,3.29895416567106
-c7,7.919790,1169.3975,7.919790,1504.1007,2689.33778,3.33238484708362
-c8,7.919790,1196.6505,7.919790,1504.1007,2716.59078,3.29895416567106
-c9,7.919790,1196.6505,7.919790,1604.7097,2817.19978,3.18114055861737
-c10,7.919790,1169.3975,7.919790,1504.1007,2689.33778,3.33238484708362
-c11,7.919790,1169.3975,7.919790,1604.7097,2789.94678,3.21221485048489
-c12,3169.6933,1169.3975,7.919790,1504.1007,5851.11129,1.53165923576786
-c13,2945.2793,1169.3975,7.919790,1604.7097,5727.30629,1.56476852987081
-c14,3169.6933,1169.3975,7.919790,1604.7097,5951.72029,1.50576778019641
-c15,7.919790,1196.6505,2990.8553,1504.1007,5699.52629,1.57239535125655
-c16,7.919790,1196.6505,2509.2839,1504.1007,5217.95489,1.71751362692379
-c17,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c18,7.919790,1169.3975,2509.2839,1604.7097,5291.31089,1.6937029059409
-c19,7.919790,1196.6505,2509.2839,1604.7097,5318.56389,1.68502415630427
-c20,7.919790,1196.6505,2990.8553,1504.1007,5699.52629,1.57239535125655
-c21,7.919790,1169.3975,2990.8553,1604.7097,5772.88229,1.55241492803043
-c22,7.919790,1169.3975,2990.8553,1504.1007,5672.27329,1.57995008065011
-c23,7.919790,1169.3975,2509.2839,1604.7097,5291.31089,1.6937029059409
-c24,7.919790,1196.6505,2990.8553,1604.7097,5800.13529,1.54512062174466
-c25,7.919790,1196.6505,7.919790,1604.7097,2817.19978,3.18114055861737
-c26,2945.2793,1196.6505,7.919790,1504.1007,5653.95029,1.58507029277277
-c27,7.919790,1169.3975,7.919790,1504.1007,2689.33778,3.33238484708362
-c28,7.919790,1169.3975,2509.2839,1604.7097,5291.31089,1.6937029059409
-c29,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c30,7.919790,1196.6505,7.919790,1504.1007,2716.59078,3.29895416567106
-c31,3169.6933,1169.3975,7.919790,1504.1007,5851.11129,1.53165923576786
-c32,3169.6933,1196.6505,7.919790,1504.1007,5878.36429,1.5245582283476
-c33,7.919790,1196.6505,7.919790,1604.7097,2817.19978,3.18114055861737
-c34,7.919790,1169.3975,2509.2839,1604.7097,5291.31089,1.6937029059409
-c35,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c36,2945.2793,1169.3975,2509.2839,1604.7097,8228.6704,1.08910774832945
-c37,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c38,2945.2793,1169.3975,7.919790,1504.1007,5626.69729,1.59274760642495
-c39,7.919790,1169.3975,2990.8553,1604.7097,5772.88229,1.55241492803043
-c40,7.919790,1169.3975,2990.8553,1504.1007,5672.27329,1.57995008065011
-c41,7.919790,1169.3975,7.919790,1604.7097,2789.94678,3.21221485048489
-c42,7.919790,1169.3975,2990.8553,1604.7097,5772.88229,1.55241492803043
-c43,2945.2793,1169.3975,7.919790,1604.7097,5727.30629,1.56476852987081
-c44,2945.2793,1169.3975,7.919790,1504.1007,5626.69729,1.59274760642495
-c45,2945.2793,1196.6505,7.919790,1504.1007,5653.95029,1.58507029277277
-c46,7.919790,1169.3975,7.919790,1504.1007,2689.33778,3.33238484708362
-c47,7.919790,1196.6505,2509.2839,1604.7097,5318.56389,1.68502415630427
-c48,7.919790,1196.6505,2509.2839,1604.7097,5318.56389,1.68502415630427
-c49,2945.2793,1196.6505,2509.2839,1604.7097,8255.9234,1.08551257772677
-c50,7.919790,1196.6505,7.919790,1504.1007,2716.59078,3.29895416567106
-c51,7.919790,1169.3975,7.919790,1604.7097,2789.94678,3.21221485048489
-c5,2689.33778
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,90.458562,5715.816,86.640472,8070.831,13963.746034,3.39536157024208
-c2,90.458562,4831.846,86.640472,8070.831,13079.776034,3.62483015873305
-c3,90.458562,5715.816,82.701434,6785.878,12674.853996,3.74063217145533
-c4,82.701434,5715.816,90.458562,8070.831,13959.806996,3.39631963922949
-c5,90.458562,4831.846,82.701434,6785.878,11790.883996,4.02106971911328
-c6,82.701434,5715.816,90.458562,6785.878,12674.853996,3.74063217145533
-c7,82.701434,4831.846,90.458562,6785.878,11790.883996,4.02106971911328
-c8,86.640472,5715.816,86.640472,6785.878,12674.974944,3.74059647734325
-c9,86.640472,5715.816,86.640472,8070.831,13959.927944,3.3962902137148
-c10,86.640472,4831.846,86.640472,6785.878,11791.004944,4.02102847238846
-c11,86.640472,4831.846,86.640472,8070.831,13075.957944,3.62588858425982
-c12,16269.448,5067.24,81.914898,6785.878,28204.480898,1.68100831223812
-c13,13738.7745,4831.846,81.914898,8070.831,26723.366398,1.77417643108507
-c14,16269.448,5067.24,81.914898,8070.831,29489.433898,1.60776117314478
-c15,82.701434,5715.816,17355.872,7022.197,30176.586434,1.57115076440408
-c16,82.701434,5715.816,14102.876,6785.878,26687.271434,1.77657603324478
-c17,82.701434,4831.846,13915.106,6785.878,25615.531434,1.8509070146395
-c18,82.701434,4831.846,13915.106,8242.368,27072.021434,1.75132717519654
-c19,82.701434,5715.816,14102.876,8242.368,28143.761434,1.68463504577106
-c20,82.701434,5715.816,17355.872,7022.197,30176.586434,1.57115076440408
-c21,82.701434,4831.846,17577.303,8070.831,30562.681434,1.5513025893116
-c22,82.701434,4831.846,17577.303,7022.197,29514.047434,1.60642036458679
-c23,82.701434,4831.846,13915.106,8242.368,27072.021434,1.75132717519654
-c24,82.701434,5715.816,17355.872,8070.831,31225.220434,1.51838693816029
-c25,90.458562,5715.816,86.640472,8070.831,13963.746034,3.39536157024208
-c26,13738.7745,5868.237,86.640472,6785.878,26479.529972,1.79051391286337
-c27,90.458562,4831.846,86.640472,6785.878,11794.823034,4.0197268294197
-c28,90.458562,4831.846,13915.106,8242.368,27079.778562,1.75082549941707
-c29,86.640472,4831.846,13915.106,6785.878,25619.470472,1.85062243447831
-c30,90.458562,5715.816,86.640472,6785.878,12678.793034,3.7394700346406
-c31,16269.448,5067.24,86.640472,6785.878,28209.206472,1.68072671164953
-c32,16269.448,5715.816,86.640472,6785.878,28857.782472,1.6429525339207
-c33,90.458562,5715.816,90.458562,8070.831,13967.564124,3.39443343446623
-c34,86.640472,4831.846,13915.106,8242.368,27075.960472,1.75107239035612
-c35,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c36,13738.7745,4831.846,13915.106,8242.368,40728.0945,1.16410962667524
-c37,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c38,13738.7745,4831.846,86.640472,6785.878,25443.138972,1.86344801503587
-c39,90.458562,4831.846,17577.303,8070.831,30570.438562,1.55090895241011
-c40,90.458562,4831.846,17577.303,7022.197,29521.804562,1.60599826273588
-c41,90.458562,4831.846,86.640472,8070.831,13079.776034,3.62483015873305
-c42,86.640472,4831.846,17577.303,8070.831,30566.620472,1.55110267712849
-c43,13738.7745,4831.846,82.701434,8070.831,26724.152934,1.77412421413991
-c44,13738.7745,4831.846,82.701434,6785.878,25439.199934,1.86373655369009
-c45,13738.7745,5868.237,86.640472,6785.878,26479.529972,1.79051391286337
-c46,90.458562,4831.846,90.458562,6785.878,11798.641124,4.01842602888524
-c47,90.458562,5715.816,14102.876,8242.368,28151.518562,1.6841708459586
-c48,86.640472,5715.816,14102.876,8242.368,28147.700472,1.68439929502317
-c49,13738.7745,5868.237,14102.876,8242.368,41952.2555,1.1301410692206
-c50,90.458562,5715.816,90.458562,6785.878,12682.611124,3.73834427016731
-c51,86.640472,4831.846,90.458562,8070.831,13079.776034,3.62483015873305
-c5,11790.883996
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,28.951489,0,28.951489,0,57.902978,0
-c2,28.951489,0,28.951489,0,57.902978,0
-c3,28.951489,0,28.951489,0,57.902978,0
-c4,28.951489,0,28.951489,0,57.902978,0
-c5,28.951489,0,28.951489,0,57.902978,0
-c6,28.951489,0,28.951489,0,57.902978,0
-c7,28.951489,0,28.951489,0,57.902978,0
-c8,28.951489,0,28.951489,0,57.902978,0
-c9,28.951489,0,28.951489,0,57.902978,0
-c10,28.951489,0,28.951489,0,57.902978,0
-c11,28.951489,0,28.951489,0,57.902978,0
-c12,0,0,28.951489,0,28.951489,0
-c13,0,0,28.951489,0,28.951489,0
-c14,0,0,28.951489,0,28.951489,0
-c15,28.951489,0,0,0,28.951489,0
-c16,28.951489,0,0,0,28.951489,0
-c17,28.951489,0,0,0,28.951489,0
-c18,28.951489,0,0,0,28.951489,0
-c19,28.951489,0,0,0,28.951489,0
-c20,28.951489,0,0,0,28.951489,0
-c21,28.951489,0,0,0,28.951489,0
-c22,28.951489,0,0,0,28.951489,0
-c23,28.951489,0,0,0,28.951489,0
-c24,28.951489,0,0,0,28.951489,0
-c25,28.951489,0,28.951489,0,57.902978,0
-c26,0,0,28.951489,0,28.951489,0
-c27,28.951489,0,28.951489,0,57.902978,0
-c28,28.951489,0,0,0,28.951489,0
-c29,28.951489,0,0,0,28.951489,0
-c30,28.951489,0,28.951489,0,57.902978,0
-c31,0,0,28.951489,0,28.951489,0
-c32,0,0,28.951489,0,28.951489,0
-c33,28.951489,0,28.951489,0,57.902978,0
-c34,28.951489,0,0,0,28.951489,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,28.951489,0,0,0,28.951489,0
-c38,0,0,28.951489,0,28.951489,0
-c39,28.951489,0,0,0,28.951489,0
-c40,28.951489,0,0,0,28.951489,0
-c41,28.951489,0,28.951489,0,57.902978,0
-c42,28.951489,0,0,0,28.951489,0
-c43,0,0,28.951489,0,28.951489,0
-c44,0,0,28.951489,0,28.951489,0
-c45,0,0,28.951489,0,28.951489,0
-c46,28.951489,0,28.951489,0,57.902978,0
-c47,28.951489,0,0,0,28.951489,0
-c48,28.951489,0,0,0,28.951489,0
-c49,0,0,0,0,0,0
-c50,28.951489,0,28.951489,0,57.902978,0
-c51,28.951489,0,28.951489,0,57.902978,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,46.163959,0,46.163959,0,92.327918,0
-c2,46.163959,0,46.163959,0,92.327918,0
-c3,46.163959,0,46.163959,0,92.327918,0
-c4,46.163959,0,46.163959,0,92.327918,0
-c5,46.163959,0,46.163959,0,92.327918,0
-c6,46.163959,0,46.163959,0,92.327918,0
-c7,46.163959,0,46.163959,0,92.327918,0
-c8,46.163959,0,46.163959,0,92.327918,0
-c9,46.163959,0,46.163959,0,92.327918,0
-c10,46.163959,0,46.163959,0,92.327918,0
-c11,46.163959,0,46.163959,0,92.327918,0
-c12,0,0,46.163959,0,46.163959,0
-c13,0,0,46.163959,0,46.163959,0
-c14,0,0,46.163959,0,46.163959,0
-c15,46.163959,0,0,0,46.163959,0
-c16,46.163959,0,0,0,46.163959,0
-c17,46.163959,0,0,0,46.163959,0
-c18,46.163959,0,0,0,46.163959,0
-c19,46.163959,0,0,0,46.163959,0
-c20,46.163959,0,0,0,46.163959,0
-c21,46.163959,0,0,0,46.163959,0
-c22,46.163959,0,0,0,46.163959,0
-c23,46.163959,0,0,0,46.163959,0
-c24,46.163959,0,0,0,46.163959,0
-c25,46.163959,0,46.163959,0,92.327918,0
-c26,0,0,46.163959,0,46.163959,0
-c27,46.163959,0,46.163959,0,92.327918,0
-c28,46.163959,0,0,0,46.163959,0
-c29,46.163959,0,0,0,46.163959,0
-c30,46.163959,0,46.163959,0,92.327918,0
-c31,0,0,46.163959,0,46.163959,0
-c32,0,0,46.163959,0,46.163959,0
-c33,46.163959,0,46.163959,0,92.327918,0
-c34,46.163959,0,0,0,46.163959,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,46.163959,0,0,0,46.163959,0
-c38,0,0,46.163959,0,46.163959,0
-c39,46.163959,0,0,0,46.163959,0
-c40,46.163959,0,0,0,46.163959,0
-c41,46.163959,0,46.163959,0,92.327918,0
-c42,46.163959,0,0,0,46.163959,0
-c43,0,0,46.163959,0,46.163959,0
-c44,0,0,46.163959,0,46.163959,0
-c45,0,0,46.163959,0,46.163959,0
-c46,46.163959,0,46.163959,0,92.327918,0
-c47,46.163959,0,0,0,46.163959,0
-c48,46.163959,0,0,0,46.163959,0
-c49,0,0,0,0,0,0
-c50,46.163959,0,46.163959,0,92.327918,0
-c51,46.163959,0,46.163959,0,92.327918,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,10.930367,0,10.930367,0,21.860734,0
-c2,10.930367,0,10.930367,0,21.860734,0
-c3,10.930367,0,10.930367,0,21.860734,0
-c4,10.930367,0,10.930367,0,21.860734,0
-c5,10.930367,0,10.930367,0,21.860734,0
-c6,10.930367,0,10.930367,0,21.860734,0
-c7,10.930367,0,10.930367,0,21.860734,0
-c8,10.930367,0,10.930367,0,21.860734,0
-c9,10.930367,0,10.930367,0,21.860734,0
-c10,10.930367,0,10.930367,0,21.860734,0
-c11,10.930367,0,10.930367,0,21.860734,0
-c12,0,0,10.930367,0,10.930367,0
-c13,0,0,10.930367,0,10.930367,0
-c14,0,0,10.930367,0,10.930367,0
-c15,10.930367,0,0,0,10.930367,0
-c16,10.930367,0,0,0,10.930367,0
-c17,10.930367,0,0,0,10.930367,0
-c18,10.930367,0,0,0,10.930367,0
-c19,10.930367,0,0,0,10.930367,0
-c20,10.930367,0,0,0,10.930367,0
-c21,10.930367,0,0,0,10.930367,0
-c22,10.930367,0,0,0,10.930367,0
-c23,10.930367,0,0,0,10.930367,0
-c24,10.930367,0,0,0,10.930367,0
-c25,10.930367,0,10.930367,0,21.860734,0
-c26,0,0,10.930367,0,10.930367,0
-c27,10.930367,0,10.930367,0,21.860734,0
-c28,10.930367,0,0,0,10.930367,0
-c29,10.930367,0,0,0,10.930367,0
-c30,10.930367,0,10.930367,0,21.860734,0
-c31,0,0,10.930367,0,10.930367,0
-c32,0,0,10.930367,0,10.930367,0
-c33,10.930367,0,10.930367,0,21.860734,0
-c34,10.930367,0,0,0,10.930367,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,10.930367,0,0,0,10.930367,0
-c38,0,0,10.930367,0,10.930367,0
-c39,10.930367,0,0,0,10.930367,0
-c40,10.930367,0,0,0,10.930367,0
-c41,10.930367,0,10.930367,0,21.860734,0
-c42,10.930367,0,0,0,10.930367,0
-c43,0,0,10.930367,0,10.930367,0
-c44,0,0,10.930367,0,10.930367,0
-c45,0,0,10.930367,0,10.930367,0
-c46,10.930367,0,10.930367,0,21.860734,0
-c47,10.930367,0,0,0,10.930367,0
-c48,10.930367,0,0,0,10.930367,0
-c49,0,0,0,0,0,0
-c50,10.930367,0,10.930367,0,21.860734,0
-c51,10.930367,0,10.930367,0,21.860734,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c44,0,0,0,0,0,0
-c45,0,0,0,0,0,0
-c46,0,0,0,0,0,0
-c47,0,0,0,0,0,0
-c48,0,0,0,0,0,0
-c49,0,0,0,0,0,0
-c50,0,0,0,0,0,0
-c51,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,235.394,0,0,235.394,0
-c13,96.9595,0,0,0,96.9595,0
-c14,0,235.394,0,0,235.394,0
-c15,0,0,0,236.319,236.319,0
-c16,0,0,187.77,0,187.77,0
-c17,0,0,0,0,0,0
-c18,0,0,0,171.537,171.537,0
-c19,0,0,187.77,171.537,359.307,0
-c20,0,0,0,236.319,236.319,0
-c21,0,0,221.431,0,221.431,0
-c22,0,0,221.431,236.319,457.75,0
-c23,0,0,0,171.537,171.537,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,96.9595,152.421,0,0,249.3805,0
-c27,0,0,0,0,0,0
-c28,0,0,0,171.537,171.537,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,235.394,0,0,235.394,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,171.537,171.537,0
-c35,96.9595,0,0,0,96.9595,0
-c36,96.9595,0,0,171.537,268.4965,0
-c37,0,0,0,0,0,0
-c38,96.9595,0,0,0,96.9595,0
-c39,0,0,221.431,0,221.431,0
-c40,0,0,221.431,236.319,457.75,0
-c41,0,0,0,0,0,0
-c42,0,0,221.431,0,221.431,0
-c43,96.9595,0,0,0,96.9595,0
-c44,96.9595,0,0,0,96.9595,0
-c45,96.9595,152.421,0,0,249.3805,0
-c46,0,0,0,0,0,0
-c47,0,0,187.77,171.537,359.307,0
-c48,0,0,187.77,171.537,359.307,0
-c49,96.9595,152.421,187.77,171.537,608.6875,0
-c50,0,0,0,0,0,0
-c51,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,49.0656,0,0,49.0656,0
-c13,56.6917,0,0,0,56.6917,0
-c14,0,49.0656,0,0,49.0656,0
-c15,0,0,0,49.0482,49.0482,0
-c16,0,0,49.3364,0,49.3364,0
-c17,0,0,0,0,0,0
-c18,0,0,0,36.5705,36.5705,0
-c19,0,0,49.3364,36.5705,85.9069,0
-c20,0,0,0,49.0482,49.0482,0
-c21,0,0,36.496,0,36.496,0
-c22,0,0,36.496,49.0482,85.5442,0
-c23,0,0,0,36.5705,36.5705,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,56.6917,36.5672,0,0,93.2589,0
-c27,0,0,0,0,0,0
-c28,0,0,0,36.5705,36.5705,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,49.0656,0,0,49.0656,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,36.5705,36.5705,0
-c35,56.6917,0,0,0,56.6917,0
-c36,56.6917,0,0,36.5705,93.2622,0
-c37,0,0,0,0,0,0
-c38,56.6917,0,0,0,56.6917,0
-c39,0,0,36.496,0,36.496,0
-c40,0,0,36.496,49.0482,85.5442,0
-c41,0,0,0,0,0,0
-c42,0,0,36.496,0,36.496,0
-c43,56.6917,0,0,0,56.6917,0
-c44,56.6917,0,0,0,56.6917,0
-c45,56.6917,36.5672,0,0,93.2589,0
-c46,0,0,0,0,0,0
-c47,0,0,49.3364,36.5705,85.9069,0
-c48,0,0,49.3364,36.5705,85.9069,0
-c49,56.6917,36.5672,49.3364,36.5705,179.1658,0
-c50,0,0,0,0,0,0
-c51,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,18.850157,1196.6505,18.850157,1604.7097,2839.060514,3.15664581298721
-c2,18.850157,1169.3975,18.850157,1604.7097,2811.807514,3.18724110261976
-c3,18.850157,1196.6505,18.850157,1504.1007,2738.451514,3.27261900636963
-c4,18.850157,1196.6505,18.850157,1604.7097,2839.060514,3.15664581298721
-c5,18.850157,1169.3975,18.850157,1504.1007,2711.198514,3.30551541068322
-c6,18.850157,1196.6505,18.850157,1504.1007,2738.451514,3.27261900636963
-c7,18.850157,1169.3975,18.850157,1504.1007,2711.198514,3.30551541068322
-c8,18.850157,1196.6505,18.850157,1504.1007,2738.451514,3.27261900636963
-c9,18.850157,1196.6505,18.850157,1604.7097,2839.060514,3.15664581298721
-c10,18.850157,1169.3975,18.850157,1504.1007,2711.198514,3.30551541068322
-c11,18.850157,1169.3975,18.850157,1604.7097,2811.807514,3.18724110261976
-c12,3169.6933,1218.4631,18.850157,1504.1007,5911.107257,1.51611335385192
-c13,3001.971,1169.3975,18.850157,1604.7097,5794.928357,1.54650896322532
-c14,3169.6933,1218.4631,18.850157,1604.7097,6011.716257,1.49074045876513
-c15,18.850157,1196.6505,2990.8553,1553.1489,5759.504857,1.55602067658747
-c16,18.850157,1196.6505,2558.6203,1504.1007,5278.221657,1.69790304625126
-c17,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c18,18.850157,1169.3975,2509.2839,1641.2802,5338.811757,1.67863356867494
-c19,18.850157,1196.6505,2558.6203,1641.2802,5415.401157,1.65489284629015
-c20,18.850157,1196.6505,2990.8553,1553.1489,5759.504857,1.55602067658747
-c21,18.850157,1169.3975,3027.3513,1604.7097,5820.308657,1.53976518672169
-c22,18.850157,1169.3975,3027.3513,1553.1489,5768.747857,1.55352753609651
-c23,18.850157,1169.3975,2509.2839,1641.2802,5338.811757,1.67863356867494
-c24,18.850157,1196.6505,2990.8553,1604.7097,5811.065657,1.54221431571386
-c25,18.850157,1196.6505,18.850157,1604.7097,2839.060514,3.15664581298721
-c26,3001.971,1233.2177,18.850157,1504.1007,5758.139557,1.55638962127382
-c27,18.850157,1169.3975,18.850157,1504.1007,2711.198514,3.30551541068322
-c28,18.850157,1169.3975,2509.2839,1641.2802,5338.811757,1.67863356867494
-c29,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c30,18.850157,1196.6505,18.850157,1504.1007,2738.451514,3.27261900636963
-c31,3169.6933,1218.4631,18.850157,1504.1007,5911.107257,1.51611335385192
-c32,3169.6933,1196.6505,18.850157,1504.1007,5889.294657,1.52172869074823
-c33,18.850157,1196.6505,18.850157,1604.7097,2839.060514,3.15664581298721
-c34,18.850157,1169.3975,2509.2839,1641.2802,5338.811757,1.67863356867494
-c35,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c36,3001.971,1169.3975,2509.2839,1641.2802,8321.9326,1.07690234024603
-c37,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c38,3001.971,1169.3975,18.850157,1504.1007,5694.319357,1.5738331626237
-c39,18.850157,1169.3975,3027.3513,1604.7097,5820.308657,1.53976518672169
-c40,18.850157,1169.3975,3027.3513,1553.1489,5768.747857,1.55352753609651
-c41,18.850157,1169.3975,18.850157,1604.7097,2811.807514,3.18724110261976
-c42,18.850157,1169.3975,3027.3513,1604.7097,5820.308657,1.53976518672169
-c43,3001.971,1169.3975,18.850157,1604.7097,5794.928357,1.54650896322532
-c44,3001.971,1169.3975,18.850157,1504.1007,5694.319357,1.5738331626237
-c45,3001.971,1233.2177,18.850157,1504.1007,5758.139557,1.55638962127382
-c46,18.850157,1169.3975,18.850157,1504.1007,2711.198514,3.30551541068322
-c47,18.850157,1196.6505,2558.6203,1641.2802,5415.401157,1.65489284629015
-c48,18.850157,1196.6505,2558.6203,1641.2802,5415.401157,1.65489284629015
-c49,3001.971,1233.2177,2558.6203,1641.2802,8435.0892,1.06245571104979
-c50,18.850157,1196.6505,18.850157,1504.1007,2738.451514,3.27261900636963
-c51,18.850157,1169.3975,18.850157,1604.7097,2811.807514,3.18724110261976
-c5,2711.198514
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c10,0,0,0,0,0,0
-c11,0,0,0,0,0,0
-c12,0,0,0,0,0,0
-c13,0,0,0,0,0,0
-c14,0,0,0,0,0,0
-c15,0,0,0,0,0,0
-c16,0,0,0,0,0,0
-c17,0,0,0,0,0,0
-c18,0,0,0,0,0,0
-c19,0,0,0,0,0,0
-c20,0,0,0,0,0,0
-c21,0,0,0,0,0,0
-c22,0,0,0,0,0,0
-c23,0,0,0,0,0,0
-c24,0,0,0,0,0,0
-c25,0,0,0,0,0,0
-c26,0,0,0,0,0,0
-c27,0,0,0,0,0,0
-c28,0,0,0,0,0,0
-c29,0,0,0,0,0,0
-c30,0,0,0,0,0,0
-c31,0,0,0,0,0,0
-c32,0,0,0,0,0,0
-c33,0,0,0,0,0,0
-c34,0,0,0,0,0,0
-c35,0,0,0,0,0,0
-c36,0,0,0,0,0,0
-c37,0,0,0,0,0,0
-c38,0,0,0,0,0,0
-c39,0,0,0,0,0,0
-c40,0,0,0,0,0,0
-c41,0,0,0,0,0,0
-c42,0,0,0,0,0,0
-c43,0,0,0,0,0,0
-c44,0,0,0,0,0,0
-c45,0,0,0,0,0,0
-c46,0,0,0,0,0,0
-c47,0,0,0,0,0,0
-c48,0,0,0,0,0,0
-c49,0,0,0,0,0,0
-c50,0,0,0,0,0,0
-c51,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results1.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results1.csv
deleted file mode 100644
index b45df9c4a9dbeb4c276364c549f61ea91945386f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results1.csv
+++ /dev/null
@@ -1,143 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c2,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c3,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c4,13641.815,5715.816,13915.106,6785.878,40058.615,1.18356480576384
-c5,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c6,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c7,13641.815,4831.846,13915.106,6785.878,39174.645,1.21027176835866
-c8,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,39174.645
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c2,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c3,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c4,2945.2793,1196.6505,2509.2839,1504.1007,8155.3144,1.09890413177812
-c5,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c6,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c7,2945.2793,1169.3975,2509.2839,1504.1007,8128.0614,1.10258870457612
-c8,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,8128.0614
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c2,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c3,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c4,13738.7745,5868.237,14102.876,6785.878,40495.7655,1.17078826137812
-c5,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c6,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c7,13738.7745,4831.846,13915.106,6785.878,39271.6045,1.20728367182634
-c8,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,39271.6045
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,96.9595,0,0,0,96.9595,0
-c2,96.9595,0,0,0,96.9595,0
-c3,96.9595,0,0,0,96.9595,0
-c4,96.9595,152.421,187.77,0,437.1505,0
-c5,96.9595,0,0,0,96.9595,0
-c6,96.9595,0,0,0,96.9595,0
-c7,96.9595,0,0,0,96.9595,0
-c8,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,56.6917,0,0,0,56.6917,0
-c2,56.6917,0,0,0,56.6917,0
-c3,56.6917,0,0,0,56.6917,0
-c4,56.6917,36.5672,49.3364,0,142.5953,0
-c5,56.6917,0,0,0,56.6917,0
-c6,56.6917,0,0,0,56.6917,0
-c7,56.6917,0,0,0,56.6917,0
-c8,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c2,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c3,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c4,3001.971,1233.2177,2558.6203,1504.1007,8297.9097,1.08002003106855
-c5,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c6,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c7,3001.971,1169.3975,2509.2839,1504.1007,8184.7531,1.09495162297624
-c8,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,8184.7531
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results2.csv b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results2.csv
deleted file mode 100644
index 6d54eb0f35f60d2549ef8b12a44a85e28ec638e2..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_results2.csv
+++ /dev/null
@@ -1,154 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c2,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c3,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c4,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c5,15.343114,5715.816,13915.106,6785.878,26432.143114,1.79372389957723
-c6,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c7,15.343114,4831.846,13915.106,6785.878,25548.173114,1.85578697165005
-c8,15.343114,5715.816,17355.872,8070.831,31157.862114,1.52166944812718
-c9,15.343114,5715.816,17355.872,6785.878,29872.909114,1.58712252162505
-c1,25548.173114
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c2,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c3,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c4,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c5,7.919790,1196.6505,2509.2839,1504.1007,5217.95489,1.71751362692379
-c6,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c7,7.919790,1169.3975,2509.2839,1504.1007,5190.70189,1.72653117386922
-c8,7.919790,1196.6505,2990.8553,1604.7097,5800.13529,1.54512062174466
-c9,7.919790,1196.6505,2990.8553,1504.1007,5699.52629,1.57239535125655
-c1,5190.70189
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,16269.448,5715.816,17355.872,8070.831,47411.967,0.999999997890828
-c1,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c2,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c3,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c4,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c5,90.458562,5715.816,14102.876,6785.878,26695.028562,1.77605979001964
-c6,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c7,90.458562,4831.846,13915.106,6785.878,25623.288562,1.85034667584701
-c8,90.458562,5715.816,17355.872,8070.831,31232.977562,1.5180098264433
-c9,90.458562,5715.816,17355.872,7022.197,30184.343562,1.5707469915832
-c1,25623.288562
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,28.951489,0,0,0,28.951489,0
-c2,28.951489,0,0,0,28.951489,0
-c3,28.951489,0,0,0,28.951489,0
-c4,28.951489,0,0,0,28.951489,0
-c5,28.951489,0,0,0,28.951489,0
-c6,28.951489,0,0,0,28.951489,0
-c7,28.951489,0,0,0,28.951489,0
-c8,28.951489,0,0,0,28.951489,0
-c9,28.951489,0,0,0,28.951489,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,46.163959,0,0,0,46.163959,0
-c2,46.163959,0,0,0,46.163959,0
-c3,46.163959,0,0,0,46.163959,0
-c4,46.163959,0,0,0,46.163959,0
-c5,46.163959,0,0,0,46.163959,0
-c6,46.163959,0,0,0,46.163959,0
-c7,46.163959,0,0,0,46.163959,0
-c8,46.163959,0,0,0,46.163959,0
-c9,46.163959,0,0,0,46.163959,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,10.930367,0,0,0,10.930367,0
-c2,10.930367,0,0,0,10.930367,0
-c3,10.930367,0,0,0,10.930367,0
-c4,10.930367,0,0,0,10.930367,0
-c5,10.930367,0,0,0,10.930367,0
-c6,10.930367,0,0,0,10.930367,0
-c7,10.930367,0,0,0,10.930367,0
-c8,10.930367,0,0,0,10.930367,0
-c9,10.930367,0,0,0,10.930367,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,187.77,0,187.77,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,236.319,236.319,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,49.3364,0,49.3364,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,49.0482,49.0482,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,3169.6933,1196.6505,2990.8553,1604.7097,8961.9088,0.999999988841663
-c1,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c2,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c3,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c4,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c5,18.850157,1196.6505,2558.6203,1504.1007,5278.221657,1.69790304625126
-c6,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c7,18.850157,1169.3975,2509.2839,1504.1007,5201.632257,1.72290315518737
-c8,18.850157,1196.6505,2990.8553,1604.7097,5811.065657,1.54221431571386
-c9,18.850157,1196.6505,2990.8553,1553.1489,5759.504857,1.55602067658747
-c1,5201.632257
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Total,Improvement
-c0,0,0,0,0,0,0
-c1,0,0,0,0,0,0
-c2,0,0,0,0,0,0
-c3,0,0,0,0,0,0
-c4,0,0,0,0,0,0
-c5,0,0,0,0,0,0
-c6,0,0,0,0,0,0
-c7,0,0,0,0,0,0
-c8,0,0,0,0,0,0
-c9,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_tensors.txt b/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_tensors.txt
deleted file mode 100644
index 38f513ebc98833ddfdd95ef72d04b05aa60ed8a4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/pipeline_GSME/pipeline_GSME_tensors.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-#Conv1,2
-Conv1,3133.72,16043.1,2912.47,13477.7,56.6917,96.9595,36.8048,222.547
-ClipRelu1,35.9733,226.348,32.8093,164.115,146.021,769.892,36.6263,179.956
-#Conv2,2
-Conv2,1160.86,5545.29,1136.75,4708.62,49.0656,235.394,36.5672,152.421
-ClipRelu2,35.7905,170.526,32.6475,123.226,145.654,550.465,36.5405,139.548
-#Conv3,2
-Conv3,2955.11,17131.1,2476.66,13751.7,49.3364,187.77,36.496,221.431
-ClipRelu3,35.7453,224.772,32.6239,163.406,145.606,770.056,36.4944,179.388
-#Conv4,3
-Conv4,1532.34,7694.78,1437.6,6507.38,49.0482,236.319,36.5705,171.537
-Add1,36.8449,189.882,32.7422,134.826,146.433,615.386,36.3565,149.573
-ClipRelu4,35.5248,186.169,33.7585,143.672,49.1495,200.587,36.4394,154.389
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs1.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs1.txt
deleted file mode 100644
index 7361a062e7d4cc86d5c955dd4c7662d2e88175a6..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs1.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-9 9 9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9,9 9
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs2.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs2.txt
deleted file mode 100644
index 3d0ad476552644463569166477024d81f4fc4dd0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_confs2.txt
+++ /dev/null
@@ -1,46 +0,0 @@
-9 9 9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9,9 9
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,7,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,7,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,7,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,7,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8 8,8,8,7,7,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,8 8 8,8 8,8,8,8,8 8
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp16.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp16.csv
deleted file mode 100644
index 8aa9f4f5f1ab4418335e9f3eb0a9ed81035e44c2..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp16.csv
+++ /dev/null
@@ -1,220 +0,0 @@
-Add1,18.4067,51.7458,26.6786,25.0672,5671.44,2933.87,2737.57,13.0333,37.8611,20.1787,17.9155,4176.73,2237.06,1963.86
-Add10,25.4775,107.72,67.1668,40.5535,8455.96,5272.51,3183.45,18.0181,76.2046,47.5202,28.6861,5980.97,3729.6,2251.5
-Add10_f2h,60.3765,243.339,153.879,89.4597,8062.26,5098.54,2963.73,42.7111,172.129,108.851,63.281,5701.54,3605.88,2095.77
-Add10_h2f,20.7649,83.3467,51.6117,31.735,8027.75,4971.09,3056.66,14.6838,58.9536,36.5095,22.4455,5678,3516.32,2161.83
-Add11,10.3212,47.2214,28.8656,18.3558,9155.38,5597.11,3558.27,7.30548,33.4486,20.446,13.0032,6482.41,3963.32,2519.21
-Add11_f2h,15.502,62.8822,38.5801,24.3021,8113.12,4977.95,3135.17,10.9812,44.5638,27.3406,17.2241,5739.52,3521.8,2217.83
-Add11_h2f,11.2293,44.7914,27.2866,17.5048,7976.14,4859.07,3117.07,7.94857,31.74,19.3364,12.4042,5644.45,3438.81,2205.75
-Add12,9.45661,44.0115,27.6225,16.389,9310.33,5843.69,3466.64,6.69292,31.1762,19.5662,11.6103,6590.69,4136.82,2453.95
-Add12_f2h,40.1768,172.415,108.883,63.5318,8582.39,5419.84,3162.55,28.4316,122.029,77.0664,44.9633,6069.24,3832.87,2236.42
-Add12_h2f,10.4892,42.8418,26.8148,16.027,8169.01,5112.99,3056.02,7.41765,30.3184,18.9771,11.3417,5780.72,3618.28,2162.52
-Add13,9.59742,41.731,25.0231,16.7079,8695.68,5214.21,3481.47,6.79463,29.5817,17.7385,11.8435,6156.17,3691.56,2464.67
-Add13_f2h,41.9408,167.231,101.045,66.1867,7973.97,4817.94,3156.03,29.6717,118.333,71.503,46.8306,5639.03,3407.28,2231.81
-Add13_h2f,10.4872,39.7994,23.8049,15.9945,7589.81,4539.64,3050.17,7.41618,28.1695,16.8494,11.3204,5371.29,3212.81,2158.55
-Add14,11.4693,49.588,29.3241,20.2638,8647.95,5114.03,3533.91,8.11133,35.0867,20.7497,14.3374,6118.69,3618.52,2500.24
-Add14_f2h,27.8587,110.353,65.5177,44.8356,7922.71,4703.85,3218.86,19.7031,78.0565,46.3446,31.7133,5603.04,3326.8,2276.35
-Add14_h2f,10.4665,40.3814,23.8416,16.5399,7716.46,4555.89,3160.57,7.40173,28.5808,16.8747,11.7065,5461.05,3224.35,2236.77
-Add15,9.39457,42.3399,26.0019,16.338,9014.62,5536.14,3478.47,6.64916,29.9924,18.4192,11.5736,6380.26,3918.39,2461.94
-Add15_f2h,40.147,165.995,102.472,63.5229,8269.94,5105.32,3164.62,28.4279,117.537,72.5574,44.9805,5848.22,3610.39,2237.87
-Add15_h2f,10.4569,41.0957,25.1843,15.9113,7859.73,4816.65,3043.07,7.39464,29.0899,17.8276,11.2626,5562.97,3409.29,2153.74
-Add16,9.36312,41.8676,26.0997,15.7679,8940.62,5573.43,3367.19,6.62684,29.8234,18.5944,11.2294,6357.34,3963.59,2393.83
-Add16_f2h,40.662,165.398,103.735,61.6634,8136.51,5103.02,3033.49,28.7668,117.041,73.408,43.6342,5755.44,3609.72,2145.76
-Add16_h2f,10.4433,40.5802,25.225,15.3552,7771.59,4830.88,2940.71,7.3846,28.7184,17.8523,10.8664,5499.86,3418.89,2081.03
-Add17,11.2953,49.8165,30.5372,19.2793,8821.06,5407.25,3413.81,7.98744,35.2482,21.6076,13.641,6241.26,3825.96,2415.37
-Add17_f2h,31.2262,125.787,77.5267,48.2602,8058.48,4967.07,3091.41,22.0978,89.0044,54.8545,34.1509,5698.9,3512.81,2186.15
-Add17_h2f,10.4534,40.67,24.8815,15.7885,7780.74,4760.17,3020.57,7.392,28.7841,17.611,11.1736,5506.18,3368.83,2137.45
-Add18,9.33772,41.9974,26.362,15.6354,8997.13,5647.64,3349.49,6.60838,29.7471,18.6729,11.0745,6368.78,3997.95,2370.9
-Add18_f2h,40.52,167.742,105.928,61.8133,8279.12,5228.18,3050.94,28.6686,118.694,74.9571,43.7377,5854.67,3697.22,2157.48
-Add18_h2f,10.4893,41.2948,25.8578,15.437,7874.36,4930.71,2943.64,7.42007,29.2336,18.3062,10.9279,5572.7,3489.61,2083.18
-Add19,9.34031,41.5447,26.3195,15.2252,8899.53,5638.08,3261.45,6.61153,29.4292,18.6449,10.7846,6300.46,3991.71,2308.83
-Add19_f2h,40.5583,165.866,105.817,60.0492,8178.77,5217.79,2960.98,28.6938,117.359,74.8714,42.4879,5783.65,3689.81,2093.86
-Add19_h2f,10.4607,40.9009,25.8358,15.0651,7819.57,4939.38,2880.19,7.39838,28.9539,18.2906,10.6639,5534.12,3496.01,2038.21
-Add1_f2h,27.7102,72.282,37.7708,34.5112,5212.56,2723,2489.56,19.6021,53.3974,28.9158,24.796,3845.92,2081.8,1786.94
-Add1_h2f,20.7976,56.1939,29.1321,27.0618,5404.42,2801.87,2602.56,14.707,41.1458,21.9739,19.3755,3957.28,2113.49,1863.36
-Add2,19.5015,74.6206,46.4565,28.1641,7673.42,4779.3,2894.12,13.7947,53.0278,33.0775,19.9657,5465.36,3412.01,2054.92
-Add20,11.2352,49.6507,30.9293,18.7214,8839.5,5506.45,3333.05,7.95735,35.1825,21.9167,13.266,6254.61,3896.25,2358.38
-Add20_f2h,31.0414,125.831,78.8493,46.9812,8107.56,5080.5,3027.06,21.9778,89.0958,55.8298,33.2664,5733.46,3592.82,2140.65
-Add20_h2f,10.4493,41.12,25.5455,15.5745,7870.47,4889.47,2981,7.38909,29.1003,18.0786,11.0219,5569.71,3460.19,2109.56
-Add21,9.3646,42.9504,26.4829,16.4675,9177.48,5658.87,3518.61,6.65751,30.5527,18.8396,11.7135,6498.66,4007.28,2491.48
-Add21_f2h,8.53298,33.0218,20.3905,12.6312,7739.9,4779.34,2960.57,6.03492,23.3902,14.4432,8.94732,5481.37,3384.74,2096.7
-Add21_h2f,5.65061,21.0508,12.9633,8.08745,7451.76,4588.85,2862.92,3.99628,14.9378,9.19932,5.73878,5287.67,3256.31,2031.46
-Add22,8.76401,41.0048,25.7147,15.2902,9358.69,5869.07,3489.62,6.20359,29.0753,18.2344,10.8413,6629.91,4158.08,2471.93
-Add22_f2h,20.0626,83.8331,52.7203,31.1128,8355.25,5254.22,3101.03,14.2023,59.3731,37.3398,22.0338,5909.48,3716.25,2193.27
-Add22_h2f,5.28191,19.8187,12.4203,7.39838,7501.83,4701.31,2800.52,3.73584,14.076,8.82147,5.25475,5324.53,3336.79,1987.84
-Add23,8.86814,40.4053,24.764,15.6413,9108.08,5582.5,3525.58,6.27795,28.715,17.5981,11.1173,6460.37,3959.65,2500.81
-Add23_f2h,20.3149,81.1815,49.9289,31.2526,7990.12,4914.06,3076.06,14.3871,57.5519,35.3971,22.1552,5654.05,3477.38,2176.71
-Add23_h2f,5.29007,18.9716,11.6231,7.34846,7171.96,4393.94,2778.02,3.74105,13.4814,8.25981,5.22185,5095.23,3121.71,1973.61
-Add24,11.2873,49.7509,30.2964,19.4545,8815.75,5368.47,3447.27,7.98223,35.2025,21.4374,13.7657,6237.44,3798.46,2439.07
-Add24_f2h,14.405,57.1891,34.8867,22.3024,7940.5,4843.94,3096.56,10.1872,40.4599,24.6826,15.7779,5617.28,3426.88,2190.47
-Add24_h2f,5.33388,19.3044,11.7551,7.54925,7310.5,4451.56,2858.94,3.83415,13.6997,8.34285,5.35713,5195.18,3163.71,2031.57
-Add25,8.75476,40.2434,24.9644,15.279,9195.92,5704.85,3491.07,6.19722,28.5635,17.7193,10.8446,6522.5,4046.65,2475.93
-Add25_f2h,20.1161,82.2092,51.1235,31.0857,8171.72,5081.58,3090.13,14.2438,58.2576,36.2306,22.0276,5781.64,3595.38,2186.32
-Add25_h2f,5.27612,19.5688,12.1329,7.43597,7418.53,4599.54,2818.99,3.73158,13.8926,8.61378,5.27904,5266.09,3265.1,2001.08
-Add26,8.7674,40.3179,25.3121,15.0058,9197.87,5774.46,3423.41,6.20711,28.5804,17.9435,10.6371,6512.63,4088.68,2423.99
-Add26_f2h,20.1714,82.6833,52.0254,30.6579,8196.8,5157.5,3039.3,14.2777,58.546,36.8384,21.7078,5797.29,3647.74,2149.57
-Add26_h2f,5.2755,19.4721,12.2095,7.26253,7382.4,4628.99,2753.42,3.73117,13.8207,8.66606,5.15474,5238.94,3285.02,1953.97
-Add27,11.1843,50.4854,31.4916,18.9938,9027.49,5631.16,3396.33,7.9091,35.73,22.288,13.4425,6388.21,3984.94,2403.36
-Add27_f2h,17.215,69.9894,43.7366,26.2528,8132.49,5082.37,3050.12,12.1926,49.5781,30.9806,18.5981,5752.34,3595.04,2157.37
-Add27_h2f,5.28841,19.6402,12.2402,7.40007,7427.35,4628.85,2798.5,3.74078,13.9399,8.68767,5.25242,5269.55,3284.09,1985.53
-Add28,8.73735,40.0918,25.2493,14.8425,9179.93,5781.38,3398.55,6.18426,28.4037,17.8884,10.5155,6499.5,4093.29,2406.26
-Add28_f2h,20.0577,82.5983,52.1453,30.453,8234.96,5198.73,3036.23,14.212,58.5442,36.9609,21.5836,5824.23,3676.89,2147.37
-Add28_h2f,5.33532,19.8316,12.4781,7.35346,7421.97,4670.26,2751.71,3.83712,14.3924,9.0537,5.33892,5266.13,3313.81,1952.39
-Add29,8.73956,39.8315,25.3262,14.5053,9114.93,5795.69,3319.24,6.18539,28.2229,17.9451,10.2782,6452.34,4102.78,2349.66
-Add29_f2h,19.9302,82.4415,52.5543,29.8873,8271.76,5273.03,2998.74,14.1093,58.388,37.2219,21.1665,5850.58,3729.7,2120.93
-Add29_h2f,5.31975,19.701,12.5142,7.18678,7475.14,4748.32,2726.81,3.80674,13.9904,8.88687,5.10378,5314.32,3375.78,1938.64
-Add2_f2h,27.9959,99.3789,62.5849,36.794,7108.69,4477.52,2631.18,19.8106,70.7329,44.6346,26.1164,5062.88,3195.89,1868.28
-Add2_h2f,21.8283,78.8418,48.6398,30.202,7222.01,4455.29,2766.73,15.4364,56.1344,34.7155,21.4366,5140.28,3178.69,1963.21
-Add3,19.3839,84.2165,55.3964,28.8201,8699.77,5723.1,2976.67,13.7121,59.6058,39.2138,20.3932,6162.34,4054.84,2107.63
-Add30,11.2081,50.3589,31.7726,18.5862,8986.61,5669.86,3316.75,7.92595,35.6402,22.487,13.1536,6359.89,4012.74,2347.24
-Add30_f2h,17.4758,71.5971,45.271,26.3262,8195.3,5182.19,3013.12,12.3884,50.7593,32.0942,18.6656,5796.6,3665.49,2131.17
-Add30_h2f,5.27643,19.4931,12.2911,7.20202,7388.83,4658.89,2729.94,3.73199,13.8463,8.73137,5.11513,5246.89,3308.6,1938.37
-Add31,0.396442,1.22564,0.764432,0.461209,6165.78,3846.09,2319.7,0.284326,1.58002,0.98564,0.594413,8102.09,5055.82,3046.44
-Add31_f2h,0.809076,1.36364,0.850185,0.513452,2660.4,1659.12,1001.27,0.636169,1.498,0.93363,0.564389,2743.85,1710.87,1033.03
-Add31_h2f,0.129454,0.0344863,0.0215079,0.0129784,514.231,320.98,193.251,0.0934957,0.085008,0.0529757,0.0320343,1260.83,787.256,473.598
-Add3_f2h,27.9995,113.419,75.4189,38,8101.81,5387.42,2714.39,19.8133,80.4002,53.4775,26.9247,5739.24,3817.49,1921.9
-Add3_h2f,21.828,88.7986,57.8825,30.9161,8136.52,5303.75,2832.77,15.4361,62.9262,41.0275,21.9002,5765.62,3759.19,2006.56
-Add4,25.2021,109.339,69.1431,40.1956,8679.65,5488.99,3190.66,17.8374,77.4612,49.0014,28.4636,6145.82,3888.12,2258.01
-Add4_f2h,54.8381,227.205,145.608,81.5971,8290.89,5313.68,2977.21,38.8148,160.96,103.191,57.7775,5871.03,3764.37,2106.96
-Add4_h2f,20.8082,85.9608,53.9681,31.9927,8261.17,5186.46,3074.71,14.7156,60.8827,38.2401,22.6459,5849.43,3673.87,2175.88
-Add5,18.3902,84.3374,55.4563,28.881,9158.85,6020.58,3138.27,13.0145,59.9538,39.4588,20.5013,6495.82,4272.67,2223.85
-Add5_f2h,52.5577,223.355,147.844,75.5108,8633.1,5736.35,2896.75,41.0843,172.923,114.184,58.7452,6111.46,4062.06,2049.8
-Add5_h2f,21.3908,91.48,59.7227,31.7573,8543.72,5576.36,2967.36,15.1322,64.8702,42.3847,22.4926,6049.21,3950.45,2099.43
-Add6,17.4475,76.3058,49.8737,26.4321,8757.37,5725.3,3032.07,12.3416,53.9826,35.2918,18.696,6200.62,4055.79,2145.43
-Add6_f2h,78.6158,330.238,219.427,110.811,8421.49,5598.56,2822.92,55.6768,233.593,155.218,78.4009,5961.87,3965.63,1996.89
-Add6_h2f,20.8524,85.6316,55.5811,30.0505,8212.43,5330.35,2882.08,14.7456,60.6346,39.379,21.2623,5814.33,3775.95,2039.03
-Add7,24.9686,108.541,68.418,40.1235,8695.27,5481.14,3214.12,17.6568,76.8331,48.4614,28.383,6155.36,3882.66,2273.62
-Add7_f2h,62.7404,259.957,165.983,93.9749,8308.17,5308.25,2999.92,44.4591,183.937,117.441,66.5205,5881.22,3760,2121.98
-Add7_h2f,20.8101,86.1797,53.9434,32.2363,8283.86,5185.4,3098.46,14.7162,61.0198,38.2202,22.8086,5865.81,3674.38,2192.31
-Add8,17.6899,77.5743,50.2264,27.348,8775.77,5682.64,3093.13,12.5138,54.8845,35.5378,19.3485,6210.13,4022.02,2188.31
-Add8_f2h,83.3626,352.496,231.707,120.789,8457.12,5559.21,2897.92,58.9719,249.424,163.976,85.4559,5981.72,3932.59,2049.31
-Add8_h2f,20.8989,85.9449,55.2763,30.6686,8225.31,5290.3,2935.01,14.7816,60.8117,39.1174,21.6967,5818.86,3743.18,2075.91
-Add9,17.6956,75.1977,48.6944,26.5033,8500.24,5504.64,2995.6,12.5184,53.2155,34.46,18.7562,6013.36,3894.4,2119.04
-Add9_f2h,82.8254,337.55,222.322,115.228,8151.08,5368.6,2782.49,58.5965,238.83,157.31,81.5231,5764.4,3796.86,1967.62
-Add9_h2f,20.8517,82.5853,53.0859,29.4994,7921.12,5091.73,2829.38,14.7458,58.4972,37.6036,20.8945,5610.17,3606.42,2003.84
-Conv1,174.622,421.265,201.391,219.874,4799.44,2288.31,2511.12,123.525,321.252,166.184,159.301,3644.22,1880.24,1813.82
-Conv10,90.3128,381.213,233.648,147.565,8444.02,5175.72,3268.3,63.8945,269.653,165.267,104.388,5971.16,3660.1,2311.1
-Conv10_f2h,32.0547,135.935,84.4552,51.4794,8482.08,5270.04,3212.04,22.6804,96.1837,59.7582,36.4267,5998.4,3727.01,2271.46
-Conv10_h2f,10.5675,40.812,24.9802,15.8318,7723.92,4727.67,2996.25,7.47381,28.8877,17.6823,11.206,5466.01,3345.8,2120.33
-Conv11,140.577,589.922,354.18,235.742,8393.31,5039.32,3353.99,99.4117,417.168,250.461,166.709,5935.19,3563.56,2371.67
-Conv11_f2h,15.1273,59.0802,34.3177,24.7625,7812.41,4538.14,3274.27,10.6992,41.7989,24.28,17.5198,5526.88,3210.68,2316.31
-Conv11_h2f,10.5792,42.4989,26.5104,15.9885,8034.13,5011.61,3022.52,7.48174,30.082,18.7653,11.3169,5685.66,3546.75,2138.95
-Conv12,167.515,695.778,426.345,269.434,8307.72,5090.78,3216.94,118.467,492.044,301.501,190.545,5874.73,3599.97,2274.79
-Conv12_f2h,14.881,59.0727,35.667,23.4057,7952.24,4801.46,3150.78,10.5393,41.8586,25.2741,16.585,5631.84,3400.55,2231.35
-Conv12_h2f,10.5947,41.783,26.4937,15.2892,7887.33,5001.18,2886.15,7.493,29.5751,18.7533,10.8221,5581.66,3539.24,2042.45
-Conv13,163.856,684.766,420.14,264.626,8358.58,5128.51,3230.07,115.875,484.236,297.105,187.133,5910.6,3626.58,2284.04
-Conv13_f2h,14.6929,58.143,34.969,23.174,7914.08,4759.78,3154.29,10.3926,41.147,24.7477,16.3996,5598.75,3367.36,2231.44
-Conv13_h2f,10.5613,42.4051,27.0983,15.3068,8030.86,5132,2898.86,7.46856,30.0103,19.1779,10.8326,5683.46,3632,2051.49
-Conv14,166.656,693.932,433.67,260.262,8328.27,5204.82,3123.45,117.858,490.722,306.673,184.051,5889.14,3680.52,2208.65
-Conv14_f2h,14.7899,59.1467,36.4903,22.6564,7995.87,4932.92,3062.95,10.4854,41.9732,25.8964,16.0771,5656.55,3489.75,2166.83
-Conv14_h2f,10.5466,41.836,27.0179,14.8181,7933.62,5123.59,2810.03,7.45794,29.6061,19.1205,10.4858,5614.19,3625.83,1988.41
-Conv15,38.244,164.048,100.733,63.3146,8579.14,5268.08,3311.06,27.046,116.03,71.2479,44.7823,6067.29,3725.73,2341.58
-Conv15_f2h,14.5914,58.2375,35.5473,22.6902,7982.55,4872.46,3110.09,10.3186,41.2036,25.1511,16.0533,5647.33,3447.24,2200.2
-Conv15_h2f,5.36591,19.9204,12.363,7.55743,7424.2,4607.58,2816.61,3.79436,14.1705,8.79504,5.37569,5280.79,3277.55,2003.32
-Conv16,65.7442,285.597,177.243,108.354,8688.58,5392.23,3296.35,46.4938,201.969,125.343,76.6265,6144,3813.07,2330.95
-Conv16_f2h,8.07266,31.1377,19.0284,12.1093,7714.42,4714.32,3000.1,5.70882,22.0551,13.478,8.57725,5463.71,3338.92,2124.82
-Conv16_h2f,5.37147,20.4791,12.9421,7.53703,7625.01,4818.74,2806.27,3.79835,14.5391,9.18856,5.35066,5413.07,3421,1992.12
-Conv17,42.064,181.248,112.411,68.8372,8619.77,5346.16,3273.62,29.7779,128.287,79.5627,48.7247,6095.56,3780.63,2314.96
-Conv17_f2h,16.9562,69.8631,43.5974,26.2657,8239.64,5141.83,3097.8,12.0049,49.482,30.8794,18.6029,5827.98,3636.93,2191.09
-Conv17_h2f,5.36951,19.8329,12.2666,7.56627,7386.95,4568.8,2818.14,3.7977,14.0692,8.70208,5.36729,5238.68,3240.19,1998.54
-Conv18,67.3709,286.812,175.784,111.027,8515.35,5219.14,3296.21,47.6566,202.876,124.339,78.5384,6021.72,3690.82,2330.93
-Conv18_f2h,8.03381,30.3417,18.3174,12.0243,7553.07,4559.83,2993.24,5.68152,21.5474,13.0086,8.539,5363.11,3237.82,2125.33
-Conv18_h2f,5.36257,20.0592,12.5343,7.52491,7480.48,4674.3,2806.19,3.79235,14.2331,8.89391,5.33933,5306.51,3315.91,1990.66
-Conv19,80.3214,341.92,212.062,129.858,8514.78,5281.11,3233.67,56.8122,241.827,149.981,91.8466,6021.13,3734.55,2286.62
-Conv19_f2h,8.10378,31.2805,19.2244,12.0561,7714.35,4741.22,2973.13,5.74732,22.2537,13.6761,8.5778,5464.6,3358.57,2106.07
-Conv19_h2f,5.41437,20.2452,12.8106,7.43458,7545.76,4774.79,2770.98,3.86967,14.3806,9.10008,5.28076,5364.57,3394.74,1969.92
-Conv1_f2h,6.3224,13.0911,5.82586,7.26522,4088.68,1806.81,2281.87,4.50384,10.2562,5.13352,5.33075,3166.3,1578.37,1655.62
-Conv1_h2f,20.9136,52.8942,27.7814,25.1128,5059.28,2657.42,2401.86,14.789,39.2661,21.4514,18.0694,3756.14,2052.16,1728.3
-Conv2,307.782,1024.12,595.423,428.699,6653.72,3868.33,2785.39,217.646,734.491,430.209,305.079,4771.02,2794.33,1981.87
-Conv20,78.1278,333.871,207.881,125.989,8547.41,5322.05,3225.36,55.2511,236.106,147.009,89.0988,6044.29,3763.53,2280.79
-Conv20_f2h,8.0013,30.8855,19.0615,11.824,7719.03,4763.93,2955.1,5.65835,21.9342,13.5377,8.39683,5481.37,3383.09,2098.36
-Conv20_h2f,5.3629,19.8115,12.5793,7.23219,7388.9,4691.57,2697.33,3.79279,14.0609,8.92816,5.1329,5243.67,3329.52,1914.21
-Conv21,80.2786,343.147,215.866,127.282,8549.99,5378.74,3171.25,56.7803,242.685,152.666,90.0207,6046.01,3803.58,2242.47
-Conv21_f2h,8.01787,30.88,19.2978,11.5821,7702.66,4813.67,2888.99,5.67027,21.8781,13.6724,8.20592,5456.47,3410.01,2046.52
-Conv21_h2f,5.37001,20.3721,13.0519,7.3202,7587.08,4860.82,2726.26,3.79825,14.4575,9.26289,5.19478,5382.54,3448.52,1934.07
-Conv2_f2h,27.627,79.905,40.5987,39.3063,5780.5,2936.19,2844.31,19.5433,58.0274,30.2178,28.055,4192.93,2182.46,2028.31
-Conv2_h2f,20.9352,73.9105,47.1414,26.7692,7060.33,4503.14,2557.2,14.8049,52.7684,33.7394,19.0438,5039.88,3222.35,1818.95
-Conv3,309.083,1232.59,785.441,447.15,7978.42,5084.33,2894.09,218.568,873.993,557.401,316.681,5658.77,3609.32,2050.03
-Conv3_f2h,27.8734,103.065,61.5928,41.4719,7402.94,4425.27,2977.67,19.7311,73.258,43.8773,29.4084,5262.47,3153.68,2110.77
-Conv3_h2f,20.9195,84.5406,56.8816,27.659,8083.02,5438.55,2644.47,14.7928,59.8999,40.3131,19.5884,5727.3,3854.57,1872.89
-Conv4,322.719,1401.3,895.195,506.101,8698.14,5558.46,3139.68,228.36,990.931,633.099,357.9,6156.32,3935.77,2220.97
-Conv4_f2h,27.8507,115.914,70.2976,45.6165,8325.55,5049.3,3276.25,19.7057,82.0999,49.8158,32.291,5894.33,3576.74,2318.09
-Conv4_h2f,20.9916,91.4069,61.7262,29.6807,8709.15,5881.22,2827.93,14.8444,64.6849,43.6858,21,6162.81,4162.15,2000.74
-Conv5,342.079,1466.82,953.788,513.027,8625.68,5618.29,3007.38,243.025,1037.96,674.59,363.661,6110.16,3984.53,2127.34
-Conv5_f2h,29.7996,126.901,80.1563,46.7446,8541.6,5399.26,3142.34,21.1314,89.8283,56.7306,33.108,6046.59,3824.36,2222.92
-Conv5_h2f,21.0449,89.4069,60.7654,28.6415,8496.02,5774.26,2721.76,14.8823,63.3142,43.0491,20.2693,6015.41,4089.96,1925.85
-Conv6,366.167,1544.8,970.645,574.154,8437.56,5301.57,3135.99,258.931,1092.95,686.959,406.064,5969.24,3751.88,2217.78
-Conv6_f2h,29.7189,123.016,74.1021,48.9137,8298.6,5003.1,3295.51,21.0821,87.1348,52.4765,34.6703,5873.05,3542.91,2330.96
-Conv6_h2f,21.0334,89.908,60.4888,29.4191,8548.78,5751.46,2797.32,14.8743,63.6171,42.8066,20.812,6048.14,4069.61,1978.68
-Conv7,370.721,1526.79,975.829,550.957,8238.3,5265.69,2972.61,262.168,1079.73,690.151,389.613,5826.45,3724.58,2102.05
-Conv7_f2h,28.6144,117.921,73.4724,44.4486,8244.58,5137.45,3107.13,20.2404,83.4145,51.9759,31.4423,5831.8,3634.58,2197.47
-Conv7_h2f,20.9627,85.9711,57.9984,27.9727,8202.44,5533.61,2668.84,14.8231,60.8081,41.0246,19.7842,5801.7,3914.18,1887.59
-Conv8,85.7697,364.625,220.731,143.895,8503.51,5147.94,3355.57,60.6542,257.845,156.091,101.757,6013.43,3640.68,2372.84
-Conv8_f2h,28.2226,114.351,68.3001,46.0511,8103.03,4839.74,3263.29,19.9598,80.8973,48.3224,32.5765,5731.14,3423.29,2307.96
-Conv8_h2f,10.5944,42.4202,26.1786,16.2416,8007.38,4941.51,3065.88,7.49241,30.028,18.5325,11.496,5666.85,3497.34,2169.6
-Conv9,138.631,604.548,373.126,231.422,8721.95,5383.25,3338.7,98.0322,427.509,263.864,163.649,6167.67,3806.87,2360.86
-Conv9_f2h,14.972,60.9,36.4674,24.4325,8135.05,4871.29,3263.76,10.589,43.0913,25.8049,17.2872,5754.87,3446.19,2308.78
-Conv9_h2f,10.6041,44.2087,28.2382,15.9704,8337.89,5325.84,3012.05,7.49957,31.2942,19.9899,11.3047,5900.93,3769.39,2131.62
-Mul1,1.17541,5.23179,3.26342,1.96837,9516.94,5936.74,3580.2,0.876982,4.143,2.58393,1.55914,7779.43,4853.1,2926.46
-Mul1_f2h,0.866649,1.15065,0.717796,0.432853,2581.39,1610.48,970.91,0.63788,1.21901,0.760487,0.45855,2684.43,1674.92,1009.55
-Mul1_h2f,0.182164,0.0453634,0.0282781,0.0170852,423.822,264.119,159.703,0.132397,0.122776,0.0765148,0.0462633,1112.34,692.957,419.399
-Pool1,5.19545,23.3959,14.6038,8.79204,9005.65,5621.39,3384.26,3.67464,16.5981,10.3611,6.23727,6386.83,3986.88,2400.04
-Pool1_f2h,7.76594,29.7619,18.5909,11.171,7665.15,4788.08,2877.06,5.49184,21.0853,13.1714,7.9142,5430.35,3392.22,2038.2
-Pool1_h2f,0.247834,0.0992326,0.0620025,0.0372301,788.32,492.524,295.796,0.176061,0.193642,0.121002,0.0726439,1528.12,954.657,573.489
-Relu1,15.7218,47.6272,24.3858,23.2413,6068.37,3109.12,2959.26,11.12,34.6311,18.1982,16.591,4417.52,2323.69,2113.76
-Relu10,7.86875,36.0799,21.9123,14.1676,9171.18,5569.92,3601.25,5.56493,25.5542,15.5208,10.0338,6495.21,3945.03,2550.3
-Relu10_f2h,14.7797,59.1119,36.0231,23.0889,7999.45,4875.03,3124.43,10.4543,41.8299,25.4918,16.3388,5659.12,3448.94,2210.27
-Relu10_h2f,10.4938,41.2344,25,16.2344,7859.14,4764.95,3094.19,7.42133,29.1837,17.6947,11.4895,5561.82,3372.3,2189.62
-Relu11,8.59732,38.8844,23.5484,15.3361,9045.57,5477.91,3567.66,6.08155,27.547,16.6833,10.8639,6405.63,3879.35,2526.35
-Relu11_f2h,14.7683,58.4599,35.5476,22.9123,7916.54,4813.8,3102.74,10.4441,41.3655,25.1536,16.2123,5600.65,3405.66,2195.05
-Relu11_h2f,11.0992,43.4074,26.2047,17.2027,7820.83,4721.3,3099.53,7.85061,30.7308,18.5528,12.1783,5534.58,3341.23,2193.41
-Relu12,7.85466,36.5025,22.6357,13.8668,9292.64,5762.53,3530.1,5.55462,26.0339,16.1441,9.89001,6622.72,4106.96,2515.83
-Relu12_f2h,14.3058,56.635,35.2456,21.3894,7924.65,4931.75,2992.9,10.1179,40.145,24.9837,15.1618,5617.17,3495.78,2121.45
-Relu12_h2f,10.4801,41.3643,25.6069,15.7575,7893.64,4886.59,3007.04,7.41178,29.2778,18.1254,11.1528,5586.04,3458.19,2127.9
-Relu13,8.56125,39.5474,24.3571,15.1903,9237.04,5689.16,3547.87,6.05455,28.1545,17.3375,10.8174,6571.95,4047.17,2524.85
-Relu13_f2h,14.7184,58.0328,35.8934,22.1394,7898.2,4885.07,3013.13,10.4134,41.0859,25.4118,15.6744,5593.87,3459.87,2134.05
-Relu13_h2f,11.0576,43.5721,26.7531,16.819,7881.1,4838.94,3042.16,7.82003,30.8389,18.9355,11.9037,5577.37,3424.54,2152.88
-Relu14,3.97634,19.8199,12.1465,7.6734,9965.36,6107.18,3858.18,2.81232,14.0894,8.63491,5.45462,7079.91,4338.99,2741
-Relu14_f2h,7.82028,30.3544,18.6345,11.72,7763.83,4766.17,2997.66,5.53103,21.5062,13.2029,8.30349,5500.08,3376.57,2123.59
-Relu14_h2f,5.27702,19.5391,11.9656,7.57355,7405.25,4534.9,2870.36,3.73208,13.8726,8.4959,5.37685,5256.59,3219.25,2037.4
-Relu15,4.46626,21.8376,13.2267,8.61093,9781.82,5924.68,3857.13,3.15993,15.5046,9.39167,6.11315,6943.47,4205.89,2737.67
-Relu15_f2h,7.89618,29.6604,18.0015,11.6589,7507.67,4556.56,2951.11,5.60608,21.1257,12.8217,8.30428,5317.97,3227.59,2090.44
-Relu15_h2f,5.52563,20.1041,12.1597,7.94437,7274.55,4399.92,2874.63,3.90829,14.2691,8.63063,5.63864,5160.14,3121.08,2039.12
-Relu16,3.96712,20.0653,12.3763,7.689,10117.4,6240.44,3876.96,2.80579,14.2471,8.7882,5.45913,7183.49,4431.09,2752.52
-Relu16_f2h,7.75796,29.607,18.2965,11.3105,7632.82,4716.92,2915.9,5.48702,20.9737,12.9613,8.01269,5405.95,3340.76,2065.26
-Relu16_h2f,5.27422,19.6466,12.1051,7.54152,7449.65,4590.04,2859.6,3.73078,13.9436,8.59179,5.35198,5285.08,3256.59,2028.56
-Relu17,4.46968,21.963,13.6141,8.34894,9828.48,6092.31,3736.18,3.16255,15.5965,9.66821,5.92856,6975.6,4324.14,2651.58
-Relu17_f2h,8.1633,31.6672,19.672,11.9952,7758.66,4819.83,2938.83,5.77354,22.4306,13.9347,8.4963,5494.62,3413.55,2081.16
-Relu17_h2f,5.53729,20.6582,12.7873,7.87092,7461.37,4618.48,2842.9,3.91618,14.6649,9.07828,5.5869,5295.73,3278.22,2017.6
-Relu18,3.96121,19.9626,12.504,7.45868,10080,6313.84,3766.17,2.80174,14.1804,8.88221,5.29831,7159.1,4484.31,2674.84
-Relu18_f2h,7.75018,29.9637,18.8008,11.1629,7732.03,4851.48,2880.55,5.4848,21.2416,13.3283,7.91355,5476.67,3436.38,2040.34
-Relu18_h2f,5.25633,19.6445,12.2951,7.34942,7474.65,4678.23,2796.42,3.71689,13.9391,8.72404,5.21518,5303.65,3319.39,1984.31
-Relu19,4.48848,22.1017,13.8543,8.24737,9855.95,6178.04,3677.91,3.18658,15.7342,9.86359,5.87074,6998.62,4387.13,2611.56
-Relu19_f2h,8.14713,31.7315,19.936,11.7955,7788.29,4893.2,2895.1,5.76233,22.481,14.1242,8.35708,5515.4,3465.21,2050.25
-Relu19_h2f,5.53758,20.7902,13.0137,7.77652,7509.53,4700.58,2808.94,3.91635,14.7505,9.2334,5.51724,5327.62,3334.91,1992.78
-Relu1_f2h,27.4697,75.5402,38.6841,36.8561,5510.99,2824.48,2686.52,19.4328,55.1294,29.0815,26.3358,4027.76,2127.42,1921.08
-Relu1_h2f,20.8432,59.4527,30.4349,29.0178,5705.97,2921.25,2784.72,14.7403,43.2523,22.743,20.713,4151.39,2183.21,1987.71
-Relu2,15.6922,62.2018,37.6932,24.5087,7928.92,4804.97,3123.95,11.0981,44.251,26.8823,17.3847,5640.39,3426.77,2215.66
-Relu2_f2h,27.4183,99.9831,61.0632,38.9199,7289.67,4451.68,2837.98,19.3974,71.2089,43.604,27.6298,5186.27,3175.2,2012.89
-Relu2_h2f,20.833,77.2117,46.7116,30.5001,7413.36,4485.02,2928.34,14.733,54.8865,33.2729,21.6286,5269.9,3194.8,2076.54
-Relu3,16.7972,74.5864,45.9807,28.6058,8880.69,5474.72,3405.97,11.8791,52.8166,32.5723,20.2471,6287.66,3877.63,2410.37
-Relu3_f2h,27.5363,113.932,70.807,43.1255,8274.93,5142.95,3131.98,19.4862,80.7269,50.1877,30.5436,5858.58,3642.6,2216.3
-Relu3_h2f,21.969,91.4346,55.9909,35.4437,8323.89,5097.22,3226.67,15.5355,64.7301,39.6523,25.081,5892.34,3609.53,2283.11
-Relu4,15.7301,72.5141,46.4394,26.0747,9222.54,5906.65,3315.89,11.1258,51.3576,32.9074,18.4553,6532.12,4185.96,2346.8
-Relu4_f2h,27.4947,117.455,75.7723,41.6831,8542.99,5511.04,3031.95,19.4508,83.1966,53.701,29.5038,6047.59,3903.27,2144.91
-Relu4_h2f,20.8274,89.0752,56.8414,32.2339,8553.76,5458.43,3095.33,14.7283,63.0717,40.27,22.8082,6056.29,3866.87,2190.05
-Relu5,17.3272,77.2477,47.3925,29.8552,8915.61,5469.7,3445.92,12.2651,54.7406,33.6028,21.1449,6311.35,3874.11,2438.06
-Relu5_f2h,30.181,124.467,77.0351,47.4321,8235.63,5094.97,3140.67,21.369,88.3475,54.7363,33.6223,5829.28,3608.44,2221.57
-Relu5_h2f,21.9931,91.2,55.5979,35.6021,8293.67,5056.11,3237.57,15.5589,64.5961,39.4043,25.2004,5871.35,3581.64,2290.5
-Relu6,15.8935,70.7734,44.6559,26.1175,8909.94,5622.41,3287.53,11.2415,50.1636,31.6568,18.5089,6317.03,3987.22,2330.08
-Relu6_f2h,29.4192,120.534,76.6245,43.9095,8205.47,5217.71,2987.77,20.8359,85.3161,54.229,31.0903,5806.09,3692.5,2113.8
-Relu6_h2f,20.9166,86.1995,54.1674,32.0321,8242.75,5179.8,3062.95,14.7909,60.9747,38.322,22.6552,5830.79,3664.73,2166.31
-Relu7,16.8139,72.987,44.3367,28.6503,8682.5,5274.35,3408.15,11.8908,51.6385,31.3713,20.2688,6142.58,3731.84,2410.93
-Relu7_f2h,28.3017,113.854,69.72,44.1342,8047.34,4928.22,3119.11,20.0174,80.5279,49.3132,31.2168,5691.5,3485.8,2205.85
-Relu7_h2f,22.041,89.2722,53.8809,35.3913,8101.07,4889.65,3211.41,15.6007,63.1996,38.1463,25.0552,5730.02,3458.88,2271.31
-Relu8,8.6595,40.1854,24.2306,15.9548,9283.68,5598.03,3685.64,6.13232,28.4788,17.1721,11.3072,6572.26,3963.36,2609.04
-Relu8_f2h,15.0619,61.1893,37.0436,24.1457,8125.34,4919.08,3206.27,10.652,43.2952,26.2123,17.0839,5748.51,3480.4,2268.24
-Relu8_h2f,10.7582,43.1574,25.9549,17.2025,8022.66,4824.84,3197.82,7.60841,30.5473,18.3724,12.1756,5677.22,3414.52,2262.83
-Relu9,8.57887,38.5238,22.5326,15.9912,8978.24,5251.35,3726.89,6.06691,27.4339,16.0488,11.3857,6387.85,3736.81,2651.17
-Relu9_f2h,14.4553,55.9886,32.856,23.1325,7749.62,4547.83,3201.79,10.2233,39.6978,23.2972,16.4014,5494.46,3224.62,2269.95
-Relu9_h2f,11.1054,43.0539,25.1182,17.9357,7754,4523.73,3230.27,7.85503,30.4771,17.782,12.6957,5487.43,3201.59,2285.95
-Softmax1,2.22516,6.67368,4.16081,2.51286,5989.43,3734.33,2255.1,1.57533,4.88219,3.04381,1.83848,4370.27,2724.81,1645.55
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp32.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp32.csv
deleted file mode 100644
index 1c825c847259023cf9ab4d78b89dda08710ec175..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_fp32.csv
+++ /dev/null
@@ -1,74 +0,0 @@
-Add1,21.6934,58.7548,31.0822,27.6726,5431.3,2876.93,2554.37,15.3418,45.0353,25.8605,19.9268,4170.35,2397.52,1841.24
-Add10,32.0305,157.253,106.44,50.8121,9819.95,6647.01,3172.94,22.6511,111.473,75.5251,35.9672,6961.24,4716.53,2245.89
-Add11,13.2849,62.5018,41.4729,21.0289,9426.61,6255.38,3171.23,9.39853,44.3375,29.446,14.8999,6690.48,4443.87,2247.88
-Add12,13.3116,62.903,42.3375,20.5655,9473.31,6376.28,3097.03,9.4262,44.5564,30.0016,14.5585,6714.71,4521.5,2193.76
-Add13,13.2261,60.7273,40.0247,20.7027,9184.29,6053.35,3130.94,9.35469,43.0083,28.3589,14.6532,6503.87,4288.67,2215.77
-Add14,18.2347,85.0395,55.6313,29.4083,9327.04,6101.58,3225.47,12.8946,60.2002,39.3989,20.8069,6602.17,4320.89,2281.89
-Add15,13.201,61.5741,40.8812,20.6929,9329.61,6194.38,3135.23,9.33612,43.6009,28.9576,14.646,6605.87,4387.49,2218.81
-Add16,13.1048,61.3472,41.1577,20.1895,9363.36,6282.02,3081.34,9.26961,43.4341,29.1451,14.2906,6627.73,4447.56,2180.42
-Add17,18.074,85.9011,57.2285,28.6726,9505.77,6332.9,3172.88,12.7825,60.7891,40.5063,20.2854,6725.93,4481.8,2244.41
-Add18,13.0014,61.4375,41.4414,19.9961,9451.79,6375.57,3076.22,9.19633,43.4954,29.3433,14.1535,6689.95,4513.32,2176.85
-Add19,12.9281,61.0404,41.4361,19.6043,9443.71,6410.78,3032.93,9.14237,43.1927,29.3243,13.8701,6682.35,4536.91,2145.7
-Add2,21.6463,74.901,45.7092,29.1918,6924.08,4226.11,2697.96,15.3072,54.6989,34.0441,20.8568,5058.68,3149.19,1928.09
-Add20,18.0244,86.3285,58.1531,28.1754,9582.35,6454.96,3127.39,12.7469,61.1622,41.2057,19.9583,6788.91,4573.83,2215.28
-Add21,11.443,53.5959,35.9335,17.6624,9368.34,6281.11,3087.23,8.0933,37.9341,25.4348,12.5,6629.85,4445.42,2184.56
-Add22,11.2802,53.8629,36.7609,17.102,9550.59,6518.28,3032.31,7.97721,38.1207,26.0183,12.103,6758.92,4613.25,2145.78
-Add23,11.4043,53.4182,36.0306,17.3876,9367.74,6318.51,3049.23,8.06561,37.8152,25.5087,12.3072,6629.92,4472.24,2157.8
-Add24,14.1964,67.2674,45.1882,22.0792,9476.37,6365.9,3110.47,10.0397,47.6016,31.9802,15.6223,6704.74,4504.4,2200.47
-Add25,11.4019,54.4409,37.1263,17.3146,9550.12,6512.81,3037.31,8.06306,38.5289,26.2759,12.2533,6758.64,4609.3,2149.39
-Add26,11.389,54.7724,37.8032,16.9691,9618.53,6638.66,2979.88,8.05392,38.7645,26.7556,12.0092,6806.88,4698.25,2108.68
-Add27,14.534,71.0934,48.8721,22.2213,9782.97,6725.16,3057.81,10.2779,50.3048,34.583,15.7223,6921.71,4758.46,2163.32
-Add28,11.3928,55.6158,38.6494,16.9664,9763.99,6785.38,2978.61,8.05735,39.362,27.355,12.0073,6909.78,4802.07,2107.78
-Add29,11.3734,55.2916,38.7026,16.589,9723.57,6806.29,2917.28,8.04323,39.1303,27.3912,11.7396,6881.02,4816.78,2064.32
-Add3,21.6875,87.1673,56.8165,30.3508,8044.5,5244.29,2800.21,15.3377,62.5303,41.0116,21.5851,5774.2,3788.18,1992.12
-Add30,14.737,73.1106,50.9526,22.158,9922.47,6915.21,3007.26,10.4237,51.7335,36.0556,15.6783,7019.51,4892.23,2127.34
-Add31,0.224655,0.0762958,0.0528715,0.0234243,659.266,456.971,202.295,0.159906,0.176819,0.122519,0.0543026,1530.64,1061.12,469.546
-Add4,31.9722,131.604,84.4071,47.197,8233.73,5281.05,2952.68,22.61,94.3563,60.9261,33.5436,5903.71,3812.26,2098.54
-Add5,21.6558,98.7438,66.2685,32.4754,9121.5,6121.82,2999.68,15.3155,70.2815,47.2673,23.0367,6492.68,4366.94,2127.81
-Add6,21.6444,102.842,70.2169,32.6251,9505.17,6490.1,3015.08,15.3068,73.0591,49.9573,23.1196,6753.39,4618.34,2136.68
-Add7,31.9182,154.555,104.005,50.5501,9684.68,6517.16,3167.52,22.5717,109.752,73.9651,35.8127,6876.69,4634.46,2243.88
-Add8,21.6322,107.553,73.8345,33.7187,9945.81,6827.96,3117.85,15.2981,76.4125,52.5414,23.892,7066.74,4859.42,2209.25
-Add9,21.8129,104.865,72.0378,32.8272,9617.72,6607.25,3010.47,15.4261,74.3584,51.1311,23.241,6820.84,4690.62,2131.47
-Conv1,96.2953,255.037,132.22,122.817,5159.09,2638.34,2520.75,68.2526,204.018,117.903,90.0101,4055.42,2323.39,1823.93
-Conv10,70.4671,340.841,226.917,113.924,9673.98,6440.5,3233.48,49.8536,241.418,160.812,80.6306,6848.59,4561.91,2287.37
-Conv11,129.334,614.475,398.927,215.548,9503.33,6169.85,3333.48,91.4697,434.917,282.486,152.474,6725.99,4368.85,2357.8
-Conv12,129.068,617.019,406.726,210.293,9562.46,6303.54,3258.92,91.2758,436.559,287.85,148.736,6765.83,4461.37,2304.88
-Conv13,129.135,621.204,409.93,211.274,9622.23,6349.82,3272.41,91.3223,439.472,290.061,149.429,6807.38,4493.24,2314.43
-Conv14,129.174,625.346,417.91,207.436,9683.85,6471.74,3212.11,91.3536,442.335,295.64,146.707,6849.89,4578.44,2271.64
-Conv15,61.1393,299.396,199.643,99.7523,9796.81,6532.83,3263.98,43.2444,211.769,141.223,70.5503,6929.6,4621.32,2308.42
-Conv16,84.5931,417.41,280.311,137.099,9869.83,6628.22,3241.61,59.8259,295.223,198.268,96.9599,6980.4,4688.15,2292.36
-Conv17,36.7136,180.084,122.113,57.9714,9812.69,6654.16,3158.53,25.9732,127.397,86.3877,41.0109,6940.16,4706.54,2233.71
-Conv18,84.6165,415.961,279.353,136.608,9832.95,6603.82,3229.13,59.8443,294.189,197.58,96.6131,6953.97,4670.58,2283.48
-Conv19,84.5493,420.587,286.448,134.139,9950.05,6776.78,3173.27,59.7919,297.44,202.583,94.8602,7036.73,4792.81,2243.99
-Conv2,167.13,533.192,304.506,228.686,6388.94,3650.52,2738.43,118.195,395.752,235.038,163.802,4746.52,2820.79,1962.43
-Conv20,84.5033,424.692,291.118,133.574,10052.5,6890.93,3161.61,59.759,300.336,205.879,94.46,7109.04,4873.38,2235.73
-Conv21,84.5829,428.664,296.983,131.681,10137.1,7023.19,3113.87,59.8165,303.148,210.028,93.1228,7168.75,4966.83,2201.97
-Conv3,167.964,647.325,407.112,240.213,7707.18,4847.1,2860.08,118.79,467.662,297.418,171.175,5566.56,3540.11,2037.55
-Conv4,167.592,744.048,484.83,259.218,8881.63,5787.69,3093.94,118.526,530.921,347.314,183.963,6338.14,4146.67,2195.7
-Conv5,177.005,835.874,561.072,274.802,9500.55,6383.92,3116.64,125.364,591.914,397.787,194.351,6756.17,4549.8,2208.86
-Conv6,218.988,1050.1,698.053,352.047,9847.98,6588.16,3259.83,159.09,748.601,496.158,252.938,7006.45,4702.25,2308.95
-Conv7,264.136,1280.25,861.007,419.244,9708.54,6531.25,3177.29,186.823,907.588,611.208,296.66,6890.93,4643.37,2249.66
-Conv8,100.507,490.584,323.717,166.867,9761,6440.68,3320.32,71.0869,347.741,229.7,118.109,6916.34,4568.28,2349.42
-Conv9,129.091,627.118,413.499,213.62,9716.34,6406.64,3309.7,91.291,444.149,293.072,151.141,6880.99,4540.5,2341.49
-Mul1,0.500934,0.486919,0.337278,0.14964,1850.52,1282.1,568.424,0.361084,0.684926,0.474355,0.210576,2540.71,1760.3,780.419
-Pool1,7.47447,35.1354,24.3713,10.764,9455.2,6558.7,2896.5,5.30974,24.9294,17.2922,7.63735,6710.44,4654.93,2055.55
-Relu1,19.1763,53.4017,28.007,25.3948,5567.29,2919.23,2648.06,13.5622,40.6992,23.0638,18.2718,4241.1,2402.95,1904.65
-Relu10,9.40207,43.1235,28.5091,14.6144,9172.92,6064.26,3108.66,6.64869,30.5546,20.2073,10.3494,6498.77,4297.98,2201.24
-Relu11,9.41748,43.1252,28.5286,14.5966,9164.63,6062.75,3101.88,6.66047,30.6133,20.2564,10.3587,6504.87,4304.29,2200.96
-Relu12,9.36322,43.724,29.3495,14.3744,9339.53,6269.12,3070.41,6.6214,30.9587,20.7838,10.1759,6612.24,4439.05,2173.4
-Relu13,9.40974,43.6635,29.1918,14.4716,9280.16,6204.35,3075.81,6.6551,30.9301,20.6807,10.25,6572.25,4394.33,2178.05
-Relu14,4.80981,20.8303,13.9278,6.90257,8662.5,5792.07,2870.43,3.40296,14.8309,9.91719,4.91395,6164.1,4121.99,2042.24
-Relu15,4.87483,20.4251,13.6704,6.7547,8463.01,5664.4,2798.61,3.50721,14.5206,9.71904,4.80188,6020.04,4029.63,1990.55
-Relu16,4.86403,20.6177,14.0222,6.5955,8564.49,5824.72,2739.77,3.49763,14.6509,9.9643,4.68671,6092.52,4143.6,1948.97
-Relu17,4.81198,21.0221,14.3894,6.63266,8734.67,5978.78,2755.88,3.40391,14.9788,10.2531,4.72591,6219.34,4257.18,1962.25
-Relu18,4.8106,21.1957,14.6867,6.509,8812.82,6106.5,2706.32,3.40271,15.0581,10.4344,4.62392,6259.54,4337.51,1922.11
-Relu19,4.81593,21.5459,14.9597,6.58625,8949.06,6213.41,2735.65,3.40702,15.3081,10.6291,4.67908,6356.15,4413.28,1942.92
-Relu2,19.0626,66.8226,40.3526,26.4701,7014.02,4236.15,2777.88,13.4814,48.633,29.9128,18.8899,5105.81,3141.06,1982.51
-Relu3,19.0941,78.945,49.9544,28.9906,8264.08,5228.56,3035.53,13.5031,56.5704,36.0347,20.6026,5917.81,3768.6,2156.24
-Relu4,19.1928,87.579,58.1748,29.4042,9138.5,6070.31,3068.19,13.5876,62.3198,41.4872,20.8544,6504.83,4330.45,2176.66
-Relu5,19.1354,92.4142,61.363,31.0513,9656.77,6411.83,3244.94,13.5331,65.6453,43.6566,22.0048,6856.9,4559.75,2298.84
-Relu6,19.1447,95.3407,64.7883,30.5524,9960.4,6768.59,3191.81,13.5399,67.727,46.0989,21.6475,7074.5,4815.39,2261.14
-Relu7,19.0951,93.1857,62.2371,30.9487,9757.72,6516.69,3241.03,13.5037,66.0793,44.1769,21.9139,6916.8,4623.74,2294.28
-Relu8,9.48155,43.6732,28.8388,14.8344,9253.12,6110.32,3142.8,6.73217,30.9767,20.4743,10.5085,6568.69,4341.77,2228.2
-Relu9,9.39785,42.293,27.4628,14.8302,8999.41,5843.66,3155.75,6.64609,29.965,19.467,10.5012,6374.39,4141.04,2234.03
-Softmax1,2.04054,7.19751,4.98957,2.20793,7058.19,4893.15,2165.04,1.44402,5.29548,3.67114,1.62439,5192.94,3600.29,1592.7
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_layers.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_layers.txt
deleted file mode 100644
index 6837e87207b24eec8c1913275aa742824a67f74f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_layers.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-Conv1,2000,3,32,32,16,3,3,3,1,1
-Conv2,2000,16,32,32,16,16,3,3,1,1
-Conv3,2000,16,32,32,16,16,3,3,1,1
-NML1
-NML2
-Conv4,2000,16,32,32,16,16,3,3,1,1
-Conv5,2000,16,32,32,16,16,3,3,1,1
-NML3
-NML4
-Conv6,2000,16,32,32,16,16,3,3,1,1
-Conv7,2000,16,32,32,16,16,3,3,1,1
-NML5
-NML6
-Conv8,2000,16,32,32,32,16,3,3,2,2
-Conv9,2000,32,16,16,32,32,3,3,1,1
-Conv10,2000,16,32,32,32,16,1,1,2,2
-NML7
-NML8
-Conv11,2000,32,16,16,32,32,3,3,1,1
-Conv12,2000,32,16,16,32,32,3,3,1,1
-NML9
-NML10
-Conv13,2000,32,16,16,32,32,3,3,1,1
-Conv14,2000,32,16,16,32,32,3,3,1,1
-NML11
-NML12
-Conv15,2000,32,16,16,64,32,3,3,2,2
-Conv16,2000,64,8,8,64,64,3,3,1,1
-Conv17,2000,32,16,16,64,32,1,1,2,2
-NML13
-NML14
-Conv18,2000,64,8,8,64,64,3,3,1,1
-Conv19,2000,64,8,8,64,64,3,3,1,1
-NML15
-NML16
-Conv20,2000,64,8,8,64,64,3,3,1,1
-Conv21,2000,64,8,8,64,64,3,3,1,1
-NML17
-NML18
-NML19
-FC1,2000,64,64,10
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_ops.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_ops.txt
deleted file mode 100644
index 86795a48547725b624c69f8768a4f7e53103d623..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_ops.txt
+++ /dev/null
@@ -1,114 +0,0 @@
-#Conv1,3
-Conv1
-Add1
-Relu1
-#Conv2,3
-Conv2
-Add2
-Relu2
-#Conv3,2
-Conv3
-Add3
-#NML1,1
-Add4
-#NML2,1
-Relu3
-#Conv4,3
-Conv4
-Add5
-Relu4
-#Conv5,2
-Conv5
-Add6
-#NML3,1
-Add7
-#NML4,1
-Relu5
-#Conv6,3
-Conv6
-Add8
-Relu6
-#Conv7,2
-Conv7
-Add9
-#NML5,1
-Add10
-#NML6,1
-Relu7
-#Conv8,3
-Conv8
-Add11
-Relu8
-#Conv9,2
-Conv9
-Add12
-#Conv10,2
-Conv10
-Add13
-#NML7,1
-Add14
-#NML8,1
-Relu9
-#Conv11,3
-Conv11
-Add15
-Relu10
-#Conv12,2
-Conv12
-Add16
-#NML9,1
-Add17
-#NML10,1
-Relu11
-#Conv13,3
-Conv13
-Add18
-Relu12
-#Conv14,2
-Conv14
-Add19
-#NML11,1
-Add20
-#NML12,1
-Relu13
-#Conv15,3
-Conv15
-Add21
-Relu14
-#Conv16,2
-Conv16
-Add22
-#Conv17,2
-Conv17
-Add23
-#NML13,1
-Add24
-#NML14,1
-Relu15
-#Conv18,3
-Conv18
-Add25
-Relu16
-#Conv19,2
-Conv19
-Add26
-#NML15,1
-Add27
-#NML16,1
-Relu17
-#Conv20,3
-Conv20
-Add28
-Relu18
-#Conv21,2
-Conv21
-Add29
-#NML17,1
-Add30
-#NML18,1
-Relu19
-#NML19,1
-Pool1
-#FC1,2
-Mul1
-Add31
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs1.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs1.txt
deleted file mode 100644
index 0f382344773bc67d7a63545f5441e02e17c8025d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs1.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-9 9 9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9,9 9
-9 9 9,9 9 9,9 9,8,8,9 9 9,7,8,8,9 9 9,7,8,8,7,8 8,9 9,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,7,9 9,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,8 8 8,7,8,8,7,9 9,8,8,9 9 9,7,9 9,8,8,7,8 8,8,8,9 9 9,9 9,8,8,9 9 9,7,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,7
-8 8 8,8 8 8,7,8,8,8 8 8,8 8,8,8,9 9 9,7,8,8,9 9 9,9 9,9 9,8,8,8 8 8,8 8,8,8,9 9 9,9 9,8,8,7,7,9 9,8,8,9 9 9,8 8,8,8,7,9 9,8,8,8,9 9
-8 8 8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8 8 8,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,9 9,8,8,8 8 8,7,8,8,8,9 9
-9 9 9,8 8 8,7,8,8,7,8 8,8,8,8 8 8,9 9,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,7
-8 8 8,8 8 8,7,8,8,7,8 8,8,8,7,8 8,8,8,9 9 9,9 9,8 8,8,8,9 9 9,8 8,8,8,8 8 8,7,8,8,8 8 8,9 9,7,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,8 8
-8 8 8,8 8 8,7,8,8,9 9 9,7,8,8,7,8 8,8,8,9 9 9,7,7,8,8,7,8 8,8,8,9 9 9,8 8,8,8,9 9 9,9 9,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,7
-8 8 8,8 8 8,7,8,8,9 9 9,8 8,8,8,8 8 8,9 9,8,8,9 9 9,8 8,7,8,8,9 9 9,8 8,8,8,9 9 9,9 9,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,9 9 9,7,8,8,8 8 8,8 8,8,8,8 8 8,9 9,8,8,9 9 9,8 8,8 8,8,8,8 8 8,9 9,8,8,7,9 9,8,8,9 9 9,8 8,7,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-9 9 9,8 8 8,9 9,8,8,7,7,8,8,7,9 9,8,8,8 8 8,7,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,7,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,8 8 8,7,8,8,7,9 9,8,8,9 9 9,7,9 9,8,8,7,9 9,8,8,9 9 9,8 8,8,8,8 8 8,9 9,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,9 9,8,8,8 8 8,8 8,8,8,9 9 9,9 9,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,7
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs2.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs2.txt
deleted file mode 100644
index 25db61b421ddaf79a95c0b645fbcd381a7f07dc0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_confs2.txt
+++ /dev/null
@@ -1,32 +0,0 @@
-9 9 9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9 9,9,9,9 9 9,9 9,9,9,9 9 9,9 9,9,9,9,9 9
-9 9 9,9 9 9,9 9,8,8,9 9 9,7,8,8,9 9 9,7,8,8,7,8 8,9 9,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,7,9 9,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,8,9 9
-9 9 9,9 9 9,7,8,8,9 9 9,7,8,8,9 9 9,9 9,8,8,7,8 8,9 9,8,8,7,7,8,8,9 9 9,8 8,8,8,9 9 9,9 9,8 8,8,8,8 8 8,9 9,8,8,8 8 8,8 8,8,8,8,7
-8 8 8,9 9 9,7,8,8,7,9 9,8,8,7,7,8,8,9 9 9,9 9,8 8,8,8,9 9 9,7,8,8,8 8 8,7,8,8,8 8 8,7,9 9,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,8 8
-8 8 8,8 8 8,8 8,8,8,7,9 9,8,8,7,7,8,8,7,9 9,8 8,8,8,9 9 9,8 8,8,8,8 8 8,8 8,8,8,8 8 8,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,8 8,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,9 9,8,8,9 9 9,8 8,8,8,9 9 9,8 8,8,8,7,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,8 8 8,7,8,8,7,9 9,8,8,9 9 9,7,9 9,8,8,7,8 8,8,8,9 9 9,9 9,8,8,9 9 9,7,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,7
-8 8 8,8 8 8,8 8,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,7,8,8,9 9 9,8 8,8,8,8 8 8,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,7,8,8,8 8 8,8 8,8,8,7,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,8 8
-8 8 8,8 8 8,7,8,8,8 8 8,8 8,8,8,9 9 9,7,8,8,9 9 9,9 9,9 9,8,8,8 8 8,8 8,8,8,9 9 9,9 9,8,8,7,7,9 9,8,8,9 9 9,8 8,8,8,7,9 9,8,8,8,9 9
-8 8 8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8 8 8,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,9 9,8,8,8 8 8,7,8,8,8,9 9
-9 9 9,9 9 9,7,8,8,7,8 8,8,8,9 9 9,9 9,8,8,8 8 8,8 8,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,8 8,8,8,9 9 9,7,8,8,8,7
-8 8 8,9 9 9,8 8,8,8,7,9 9,8,8,8 8 8,9 9,8,8,9 9 9,7,7,8,8,8 8 8,7,8,8,9 9 9,8 8,8,8,9 9 9,7,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,9 9 9,7,8,8,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,7,7,8,8,9 9 9,7,8,8,9 9 9,8 8,8,8,7,8 8,9 9,8,8,8 8 8,9 9,8,8,8 8 8,8 8,8,8,8,8 8
-9 9 9,8 8 8,7,8,8,7,8 8,8,8,8 8 8,9 9,8,8,7,8 8,8 8,8,8,8 8 8,8 8,8,8,9 9 9,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,7
-8 8 8,8 8 8,7,8,8,7,8 8,8,8,7,8 8,8,8,9 9 9,9 9,8 8,8,8,9 9 9,8 8,8,8,8 8 8,7,8,8,8 8 8,9 9,7,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,8 8
-8 8 8,9 9 9,8 8,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,7,8,8,8 8 8,8 8,8,8,9 9 9,8 8,7,8,8,8 8 8,9 9,8,8,8 8 8,8 8,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,9 9 9,7,8,8,7,8 8,8,8,9 9 9,7,7,8,8,7,8 8,8,8,9 9 9,8 8,8,8,9 9 9,9 9,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,7
-8 8 8,8 8 8,7,8,8,9 9 9,7,8,8,9 9 9,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,7,8,8,8 8 8,8 8,8,8,7,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,9 9 9,8 8,8,8,8 8 8,9 9,8,8,9 9 9,8 8,7,8,8,9 9 9,8 8,8,8,9 9 9,9 9,8,8,8 8 8,8 8,7,8,8,8 8 8,7,8,8,8 8 8,9 9,8,8,8,9 9
-9 9 9,9 9 9,7,8,8,8 8 8,7,8,8,8 8 8,9 9,8,8,8 8 8,8 8,9 9,8,8,8 8 8,7,8,8,9 9 9,9 9,8,8,7,8 8,8 8,8,8,8 8 8,9 9,8,8,8 8 8,9 9,8,8,8,7
-9 9 9,9 9 9,7,8,8,8 8 8,9 9,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8 8,8,8,8 8 8,7,8,8,8 8 8,8 8,8,8,7,8 8,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,7,7,8,8,7,7,8,8,7,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,8 8,8,8,8 8 8,8 8,9 9,8,8,9 9 9,9 9,8,8,9 9 9,9 9,8,8,8,7
-8 8 8,8 8 8,8 8,8,8,9 9 9,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,7,8,8,8 8 8,8 8,8,8,8 8 8,8 8,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,7,7,8,8,8 8 8,7,8,8,9 9 9,9 9,9 9,8,8,9 9 9,7,8,8,8 8 8,9 9,8,8,8 8 8,7,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,9 9 9,7,8,8,8 8 8,8 8,8,8,8 8 8,9 9,8,8,9 9 9,8 8,8 8,8,8,8 8 8,9 9,8,8,7,9 9,8,8,9 9 9,8 8,7,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-9 9 9,8 8 8,9 9,8,8,7,7,8,8,7,9 9,8,8,8 8 8,7,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,7,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-9 9 9,8 8 8,7,8,8,7,7,8,8,7,7,8,8,7,7,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,9 9 9,8 8,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,8 8 8,7,8,8,7,9 9,8,8,9 9 9,7,9 9,8,8,7,9 9,8,8,9 9 9,8 8,8,8,8 8 8,9 9,8 8,8,8,9 9 9,9 9,8,8,9 9 9,8 8,8,8,8,9 9
-8 8 8,8 8 8,8 8,8,8,9 9 9,7,8,8,9 9 9,7,8,8,9 9 9,8 8,7,8,8,7,8 8,8,8,8 8 8,8 8,8,8,8 8 8,9 9,8 8,8,8,8 8 8,9 9,8,8,9 9 9,9 9,8,8,8,9 9
-8 8 8,8 8 8,7,8,8,7,7,8,8,7,7,8,8,8 8 8,9 9,8 8,8,8,9 9 9,9 9,8,8,8 8 8,8 8,8,8,9 9 9,9 9,9 9,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,7
-9 9 9,8 8 8,9 9,8,8,8 8 8,7,8,8,8 8 8,7,8,8,9 9 9,7,7,8,8,8 8 8,9 9,8,8,8 8 8,9 9,8,8,8 8 8,9 9,7,8,8,9 9 9,9 9,8,8,8 8 8,9 9,8,8,8,9 9
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results1.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results1.csv
deleted file mode 100644
index 43efb1d48e791b041d332cb015d671f111dae293..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results1.csv
+++ /dev/null
@@ -1,187 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,674.9156,734.4923,109.339,74.5864,930.3708,17.457054,108.541,77.2477,1252.9937,17.457054,107.72,72.987,8.728527,648.5595,401.5683,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,6.546395,471.2729,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.5632148,11291.7676448,1.34764923072455
-c2,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,596.759,13.092790,401.5683,49.588,38.5238,13.092790,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,373.8222,10.910659,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.005455,11365.706356,1.33888220443761
-c3,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1252.9937,17.457054,107.72,72.987,596.759,690.021,401.5683,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,6.546395,10.910659,233.5022,49.7509,21.8376,491.0196,382.2379,50.4854,21.963,10.910659,483.9556,50.3589,22.1017,23.3959,0.5632148,13253.8246358,1.1481472267923
-c4,520.638,674.9156,734.4923,109.339,74.5864,1558.1515,938.716,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,596.759,648.5595,401.5683,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,0.5632148,15686.5261738,0.970089989918059
-c5,367.1935,1160.9424,17.457054,109.339,74.5864,17.457054,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,8.728527,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,0.005455,14074.59519,1.08119216227922
-c6,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,1543.1258,108.541,77.2477,17.457054,1601.9877,107.72,72.987,596.759,690.021,422.944,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,471.2729,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,6.45743,12536.860114,1.21380807116336
-c7,520.638,1160.9424,17.457054,109.339,74.5864,930.3708,17.457054,108.541,77.2477,17.457054,1601.9877,107.72,72.987,596.759,13.092790,4.364263,49.588,38.5238,13.092790,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,471.2729,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.005455,11067.04686,1.37501378369501
-c8,520.638,1160.9424,17.457054,109.339,74.5864,930.3708,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,596.759,648.5595,4.364263,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,14218.4527228,1.07025302291669
-c9,520.638,674.9156,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,596.759,648.5595,422.944,49.588,38.5238,668.3418,678.3662,49.8165,38.8844,13.092790,686.3864,49.6507,39.5474,373.8222,326.6018,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,14710.7549908,1.03443650722707
-c10,367.1935,1160.9424,734.4923,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,452.0318,13.092790,401.5683,49.588,38.5238,668.3418,678.3662,49.8165,38.8844,763.2659,686.3864,49.6507,39.5474,226.8183,471.2729,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10964.5893988,1.38786245636149
-c11,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,596.759,13.092790,401.5683,49.588,38.5238,13.092790,678.3662,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,226.8183,471.2729,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.5632148,11669.4333568,1.30403435360716
-c12,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,678.3662,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,471.2729,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.005455,10209.331925,1.49053259092139
-c12,10209.331925
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,207.8389,189.6515,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,4.555456,148.08761,83.6932,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,3.459840,95.8733,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.725589,2667.980987,1.25240053810022
-c2,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,123.27345,6.804352,83.6932,11.4693,8.57887,6.804352,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,77.39211,5.766400,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.003604,2671.905996,1.25056077157885
-c3,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,123.27345,142.4026,83.6932,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,3.459840,5.766400,48.1179,11.2873,4.46626,100.88243,89.0888,11.1843,4.46968,5.766400,95.9563,11.2081,4.48848,5.19545,0.725589,3111.982111,1.07371466880149
-c4,208.7505,207.8389,189.6515,25.2021,16.7972,356.8393,198.6494,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,123.27345,148.08761,83.6932,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,0.725589,3723.591349,0.897354340497627
-c5,137.165,342.9757,9.024416,25.2021,16.7972,9.024416,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,4.555456,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,0.003604,3290.311852,1.01552101981363
-c6,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,359.5265,24.9686,17.3272,9.024416,388.4166,25.4775,16.8139,123.27345,142.4026,99.91022,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,95.8733,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,1.571852,2991.287152,1.11703780596996
-c7,208.7505,342.9757,9.024416,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,9.024416,388.4166,25.4775,16.8139,123.27345,6.804352,2.306560,11.4693,8.57887,6.804352,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,95.8733,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.003604,2668.589466,1.25211497173335
-c8,208.7505,342.9757,9.024416,25.2021,16.7972,208.4406,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,123.27345,148.08761,2.306560,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,3349.738715,0.997504920096285
-c9,208.7505,207.8389,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,123.27345,148.08761,99.91022,11.4693,8.57887,157.84032,142.1728,11.2953,8.59732,6.804352,142.1021,11.2352,8.56125,77.39211,74.50821,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,3423.875657,0.975906015914467
-c10,137.165,342.9757,189.6515,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,104.7504,6.804352,83.6932,11.4693,8.57887,157.84032,142.1728,11.2953,8.59732,181.04838,142.1021,11.2352,8.56125,51.58494,95.8733,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2572.789709,1.2987384112419
-c11,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,123.27345,6.804352,83.6932,11.4693,8.57887,6.804352,142.1728,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,51.58494,95.8733,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.725589,2736.116601,1.22121287727924
-c12,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,142.1728,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,95.8733,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.003604,2438.679674,1.37015978261031
-c12,2438.679674
-
-Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,674.9156,734.4923,336.544,74.5864,1021.7777,67.644319,108.541,77.2477,1342.9017,67.644319,107.72,72.987,29.480448,648.5595,442.3803,159.941,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,18.903715,471.2729,291.5164,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.6085782,12207.0880792,1.24659885235606
-c2,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,639.1792,42.156294,401.5683,159.941,38.5238,42.156294,737.6456,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,393.7426,29.560412,221.6533,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.08824,12331.227597,1.23404923570604
-c3,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1342.9017,67.644319,107.72,72.987,639.1792,690.021,401.5683,159.941,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,18.903715,29.560412,233.5022,106.94,21.8376,511.0788,413.5184,50.4854,21.963,29.560412,483.9556,121.956,22.1017,23.3959,0.6085782,14008.0356552,1.08632947407713
-c4,533.7291,748.8261,734.4923,336.544,74.5864,1558.1515,1028.1229,368.498,77.2477,1693.1477,1601.9877,107.72,72.987,639.1792,709.4595,442.3803,159.941,38.5238,761.6715,796.7183,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,0.6085782,16942.9772902,0.898150411485638
-c5,367.1935,1240.8474,67.644319,109.339,74.5864,67.644319,1543.1258,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,29.480448,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,0.08824,15016.108626,1.0134011675376
-c6,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,1543.1258,108.541,77.2477,67.644319,1601.9877,107.72,72.987,639.1792,690.021,558.879,49.588,38.5238,761.6715,796.7183,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,491.752,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,6.45743,13219.828417,1.15109981156195
-c7,533.7291,1160.9424,67.644319,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,67.644319,1601.9877,107.72,72.987,639.1792,42.156294,21.193653,49.588,38.5238,42.156294,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,471.2729,291.5164,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.08824,11771.641438,1.29271198631703
-c8,533.7291,1160.9424,67.644319,109.339,74.5864,1021.7777,1670.0268,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,639.1792,709.4595,21.193653,49.588,38.5238,761.6715,796.7183,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,15338.3594982,0.992110141725051
-c9,533.7291,748.8261,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,639.1792,709.4595,422.944,49.588,38.5238,668.3418,720.1492,175.6035,38.8844,42.156294,686.3864,175.4817,39.5474,393.7426,357.7395,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,15844.5487272,0.960414984406291
-c10,367.1935,1240.8474,819.0329,336.544,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,452.0318,42.156294,401.5683,159.941,38.5238,668.3418,720.1492,175.6035,38.8844,763.2659,728.2224,175.4817,39.5474,226.8183,491.752,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,12435.7253652,1.22367948354795
-c11,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,639.1792,42.156294,401.5683,159.941,38.5238,42.156294,678.3662,175.6035,38.8844,768.7706,794.6234,49.6507,39.5474,226.8183,491.752,291.5164,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.6085782,12745.7919232,1.19391106391045
-c12,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,678.3662,175.6035,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,471.2729,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.08824,11080.999035,1.37328249279752
-c12,11080.999035
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,10.859436,0,0,0,0,0,0,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.895404,0
-c2,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,114.896439,0
-c3,0,0,23.842121,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.351726,11.797401,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,78.63077,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,11.797401,0
-c5,0,0,23.842121,0,0,23.842121,0,0,0,0,0,0,0,10.859436,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,58.567101,0
-c6,0,0,23.842121,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,90.769396,0
-c7,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,6.991873,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,110.090911,0
-c8,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,46.099802,0
-c9,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,43.085154,0
-c10,0,0,0,0,0,23.842121,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,90.769396,0
-c11,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,103.075615,0
-c12,0,0,23.842121,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,119.234028,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,9.892485,0,0,0,0,0,0,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,67.588367,0
-c2,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,112.524902,0
-c3,0,0,26.345144,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.005594,6.852352,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,71.400586,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,6.852352,0
-c5,0,0,26.345144,0,0,26.345144,0,0,0,0,0,0,0,9.892485,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,62.642135,0
-c6,0,0,26.345144,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,97.244707,0
-c7,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,9.837517,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,115.510067,0
-c8,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,47.95541,0
-c9,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,44.554419,0
-c10,0,0,0,0,0,26.345144,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,97.244707,0
-c11,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,105.613188,0
-c12,0,0,26.345144,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,131.785082,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,2.573634,0,0,0,0,0,0,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.03961,0
-c2,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,28.708076,0
-c3,0,0,6.518272,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.429432,2.071773,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,18.609522,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,2.071773,0
-c5,0,0,6.518272,0,0,6.518272,0,0,0,0,0,0,0,2.573634,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,15.623833,0
-c6,0,0,6.518272,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,24.242415,0
-c7,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,2.308683,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,28.944986,0
-c8,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,12.052411,0
-c9,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,11.205871,0
-c10,0,0,0,0,0,6.518272,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,24.242415,0
-c11,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26.622648,0
-c12,0,0,6.518272,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,32.605015,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,227.205,0,91.4069,0,0,0,89.908,0,0,0,0,0,40.812,110.353,0,0,0,0,0,42.4051,59.1467,0,0,0,0,69.8631,0,0,0,0,0,0,19.8115,30.88,0,0,0,0.0453634,781.8366634,0
-c2,13.0911,0,0,0,0,0,0,0,0,0,0,243.339,0,42.4202,0,0,110.353,0,0,0,0,0,42.4051,0,125.831,0,19.9204,0,0,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0,738.0999,0
-c3,13.0911,0,0,0,0,0,0,0,0,89.908,0,0,0,42.4202,0,0,110.353,0,0,0,0,0,42.4051,0,125.831,0,0,0,0,57.1891,0,20.0592,31.2805,0,0,0,0,71.5971,0,0,0.0453634,604.1796634,0
-c4,13.0911,73.9105,0,227.205,0,0,89.4069,259.957,0,0,0,0,0,42.4202,60.9,40.812,110.353,0,42.4989,59.0727,0,0,0,0,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,0,69.9894,0,0,0,0,0,0,0.0453634,1237.8013634,0
-c5,0,79.905,0,0,0,0,0,0,0,0,85.9711,243.339,0,0,0,0,0,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,820.3042,0
-c6,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,42.4202,0,135.935,0,0,42.4989,59.0727,0,0,0,0,0,0,0,20.4791,0,0,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,494.9542,0
-c7,13.0911,0,0,0,0,91.4069,0,0,0,0,0,0,0,42.4202,0,0,0,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,0,69.8631,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0,478.9936,0
-c8,13.0911,0,0,0,0,91.4069,126.901,0,0,0,85.9711,243.339,0,42.4202,60.9,0,0,0,42.4989,59.0727,0,0,42.4051,0,125.831,0,0,0,0,0,0,0,0,0,0,0,20.3721,71.5971,0,0,0.0453634,1025.8515634,0
-c9,13.0911,73.9105,0,0,0,0,0,0,0,0,85.9711,243.339,0,42.4202,60.9,0,0,0,0,41.783,125.787,0,0,0,125.831,0,19.9204,31.1377,0,0,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1046.1541634,0
-c10,0,79.905,84.5406,227.205,0,0,0,0,0,0,0,243.339,0,0,0,0,110.353,0,0,41.783,125.787,0,0,41.836,125.831,0,0,20.4791,0,0,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1283.1218634,0
-c11,13.0911,0,0,0,0,0,0,0,0,0,0,243.339,0,42.4202,0,0,110.353,0,0,0,125.787,0,42.4051,59.1467,0,0,0,20.4791,69.8631,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0.0453634,867.6697634,0
-c12,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,125.787,0,0,0,0,0,19.9204,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0,620.648,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,54.8381,0,20.9916,0,0,0,21.0334,0,0,0,0,0,10.5675,27.8587,0,0,0,0,0,10.5613,14.7899,0,0,0,0,16.9562,0,0,0,0,0,0,5.3629,8.01787,0,0,0,0.182164,191.159634,0
-c2,6.3224,0,0,0,0,0,0,0,0,0,0,60.3765,0,10.5944,0,0,27.8587,0,0,0,0,0,10.5613,0,31.0414,0,5.36591,0,0,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0,188.07895,0
-c3,6.3224,0,0,0,0,0,0,0,0,21.0334,0,0,0,10.5944,0,0,27.8587,0,0,0,0,0,10.5613,0,31.0414,0,0,0,0,14.405,0,5.36257,8.10378,0,0,0,0,17.4758,0,0,0.182164,152.940914,0
-c4,6.3224,20.9352,0,54.8381,0,0,21.0449,62.7404,0,0,0,0,0,10.5944,14.972,10.5675,27.8587,0,10.5792,14.881,0,0,0,0,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,0,17.215,0,0,0,0,0,0,0.182164,311.306614,0
-c5,0,27.627,0,0,0,0,0,0,0,0,20.9627,60.3765,0,0,0,0,0,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,212.94675,0
-c6,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,10.5944,0,32.0547,0,0,10.5792,14.881,0,0,0,0,0,0,0,5.37147,0,0,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,125.21944,0
-c7,6.3224,0,0,0,0,20.9916,0,0,0,0,0,0,0,10.5944,0,0,0,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,0,16.9562,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0,121.54005,0
-c8,6.3224,0,0,0,0,20.9916,29.7996,0,0,0,20.9627,60.3765,0,10.5944,14.972,0,0,0,10.5792,14.881,0,0,10.5613,0,31.0414,0,0,0,0,0,0,0,0,0,0,0,5.37001,17.4758,0,0,0.182164,254.110074,0
-c9,6.3224,20.9352,0,0,0,0,0,0,0,0,20.9627,60.3765,0,10.5944,14.972,0,0,0,0,10.5947,31.2262,0,0,0,31.0414,0,5.36591,8.07266,0,0,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,266.069614,0
-c10,0,27.627,20.9195,54.8381,0,0,0,0,0,0,0,60.3765,0,0,0,0,27.8587,0,0,10.5947,31.2262,0,0,10.5466,31.0414,0,0,5.37147,0,0,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,326.005714,0
-c11,6.3224,0,0,0,0,0,0,0,0,0,0,60.3765,0,10.5944,0,0,27.8587,0,0,0,31.2262,0,10.5613,14.7899,0,0,0,5.37147,16.9562,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0.182164,220.197574,0
-c12,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,31.2262,0,0,0,0,0,5.36591,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0,155.98089,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,207.8389,189.6515,80.0402,16.7972,229.4322,15.542688,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,7.12909,148.08761,94.2607,39.328,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,4.889272,95.8733,67.88834,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.907753,2876.180231,1.16174250723641
-c2,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,133.86785,10.338268,83.6932,39.328,8.57887,10.338268,176.87812,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,82.75802,7.838173,50.93214,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.017259,2888.693022,1.15671025196563
-c3,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,133.86785,142.4026,83.6932,39.328,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,4.889272,7.838173,48.1179,25.6923,4.46626,106.245,97.19258,11.1843,4.46968,7.838173,95.9563,28.6839,4.48848,5.19545,0.907753,3283.532547,1.01761770270591
-c4,215.0729,228.7741,189.6515,80.0402,16.7972,356.8393,219.6943,87.709,17.3272,399.7504,388.4166,25.4775,16.8139,133.86785,163.05961,94.2607,39.328,8.57887,162.51627,191.75912,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,0.907753,4036.969736,0.827695297399294
-c5,137.165,370.6027,15.542688,25.2021,16.7972,15.542688,359.5265,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,7.12909,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,0.017259,3518.882435,0.949557399477104
-c6,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,359.5265,24.9686,17.3272,15.542688,388.4166,25.4775,16.8139,133.86785,142.4026,131.96492,11.4693,8.57887,162.51627,191.75912,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,101.24477,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,1.571852,3140.749007,1.06388025122823
-c7,215.0729,342.9757,15.542688,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,15.542688,388.4166,25.4775,16.8139,133.86785,10.338268,4.615243,11.4693,8.57887,10.338268,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,95.8733,67.88834,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.017259,2819.074502,1.18527581591117
-c8,215.0729,342.9757,15.542688,25.2021,16.7972,229.4322,389.3261,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,133.86785,163.05961,4.615243,11.4693,8.57887,162.51627,191.75912,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3615.9012,0.924079689066734
-c9,215.0729,228.7741,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,133.86785,163.05961,99.91022,11.4693,8.57887,157.84032,152.7675,42.5215,8.59732,10.338268,142.1021,42.2766,8.56125,82.75802,82.58087,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3701.151142,0.902795030660355
-c10,137.165,370.6027,210.571,80.0402,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,104.7504,10.338268,83.6932,39.328,8.57887,157.84032,152.7675,42.5215,8.59732,181.04838,152.6487,42.2766,8.56125,51.58494,101.24477,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2923.037838,1.14311925464992
-c11,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,133.86785,10.338268,83.6932,39.328,8.57887,10.338268,142.1728,42.5215,8.59732,162.06092,190.78621,11.2352,8.56125,51.58494,101.24477,67.88834,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.907753,2982.936823,1.12016480242549
-c12,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,142.1728,42.5215,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,95.8733,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.017259,2627.265579,1.27180930946877
-c12,2627.265579
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results2.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results2.csv
deleted file mode 100644
index 79f9be2d437ef747e412badb8800b6261fbc6f41..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_promise_results2.csv
+++ /dev/null
@@ -1,396 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,674.9156,734.4923,109.339,74.5864,930.3708,17.457054,108.541,77.2477,1252.9937,17.457054,107.72,72.987,8.728527,648.5595,401.5683,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,6.546395,471.2729,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.5632148,11291.7676448,1.34764923072455
-c2,367.1935,674.9156,17.457054,109.339,74.5864,930.3708,17.457054,108.541,77.2477,1252.9937,1385.115,107.72,72.987,8.728527,648.5595,401.5683,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,471.2729,221.6533,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,0.005455,10914.84997,1.39418700368827
-c3,520.638,674.9156,17.457054,109.339,74.5864,17.457054,938.716,108.541,77.2477,17.457054,17.457054,107.72,72.987,596.759,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,6.45743,8908.297885,1.70822104743502
-c4,520.638,1160.9424,1316.8065,109.339,74.5864,17.457054,938.716,108.541,77.2477,17.457054,17.457054,107.72,72.987,8.728527,690.021,422.944,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,11439.1080628,1.33029095434964
-c5,520.638,1160.9424,1316.8065,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,401.5683,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,6.546395,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10682.6043848,1.42449738136916
-c6,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,596.759,13.092790,401.5683,49.588,38.5238,13.092790,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,373.8222,10.910659,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.005455,11365.706356,1.33888220443761
-c7,520.638,1160.9424,1316.8065,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,226.8183,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10199.6991798,1.49194027170362
-c8,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,6.45743,8722.872444,1.74453335618978
-c9,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1252.9937,17.457054,107.72,72.987,596.759,690.021,401.5683,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,6.546395,10.910659,233.5022,49.7509,21.8376,491.0196,382.2379,50.4854,21.963,10.910659,483.9556,50.3589,22.1017,23.3959,0.5632148,13253.8246358,1.1481472267923
-c10,520.638,674.9156,734.4923,109.339,74.5864,1558.1515,938.716,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,596.759,648.5595,401.5683,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,0.5632148,15686.5261738,0.970089989918059
-c11,367.1935,674.9156,17.457054,109.339,74.5864,17.457054,1543.1258,108.541,77.2477,1252.9937,1385.115,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,382.2379,50.4854,21.963,501.5035,10.910659,50.3589,22.1017,23.3959,0.005455,12337.898812,1.23338197397609
-c12,520.638,674.9156,1316.8065,109.339,74.5864,17.457054,938.716,108.541,77.2477,1693.1477,1385.115,107.72,72.987,596.759,13.092790,4.364263,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,10.910659,233.5022,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,12679.7733708,1.20012728538437
-c13,520.638,674.9156,17.457054,109.339,74.5864,17.457054,938.716,108.541,77.2477,1252.9937,1385.115,107.72,72.987,452.0318,13.092790,4.364263,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,6.546395,326.6018,233.5022,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,10699.705976,1.42222057379065
-c14,367.1935,1160.9424,17.457054,109.339,74.5864,17.457054,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,8.728527,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,0.005455,14074.59519,1.08119216227922
-c15,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,1543.1258,108.541,77.2477,17.457054,1601.9877,107.72,72.987,596.759,690.021,422.944,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,471.2729,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,6.45743,12536.860114,1.21380807116336
-c16,520.638,674.9156,1316.8065,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,326.6018,2.182132,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,0.5632148,9737.0717528,1.56282528719598
-c17,520.638,1160.9424,17.457054,109.339,74.5864,930.3708,17.457054,108.541,77.2477,17.457054,1601.9877,107.72,72.987,596.759,13.092790,4.364263,49.588,38.5238,13.092790,737.6456,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,471.2729,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.005455,11067.04686,1.37501378369501
-c18,520.638,1160.9424,17.457054,109.339,74.5864,930.3708,17.457054,108.541,77.2477,1252.9937,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10865.4286208,1.40052845643072
-c19,520.638,1160.9424,17.457054,109.339,74.5864,930.3708,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,596.759,648.5595,4.364263,49.588,38.5238,719.1726,737.6456,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,14218.4527228,1.07025302291669
-c20,367.1935,674.9156,17.457054,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1385.115,107.72,72.987,452.0318,648.5595,401.5683,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,726.3655,686.3864,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.005455,12621.276848,1.20568958097472
-c21,367.1935,674.9156,17.457054,109.339,74.5864,1558.1515,938.716,108.541,77.2477,1252.9937,1385.115,107.72,72.987,596.759,690.021,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,13552.2424538,1.12286524199887
-c22,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,8.728527,13.092790,401.5683,49.588,38.5238,719.1726,678.3662,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,0.005455,8883.627842,1.71296481731923
-c23,520.638,1160.9424,1316.8065,109.339,74.5864,930.3708,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,11465.2044668,1.32726302667683
-c24,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,1693.1477,17.457054,107.72,72.987,596.759,690.021,401.5683,49.588,38.5238,719.1726,13.092790,49.8165,38.8844,763.2659,686.3864,49.6507,39.5474,226.8183,10.910659,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10687.2019798,1.42388456783862
-c25,520.638,674.9156,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1385.115,107.72,72.987,596.759,648.5595,422.944,49.588,38.5238,668.3418,678.3662,49.8165,38.8844,13.092790,686.3864,49.6507,39.5474,373.8222,326.6018,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,14710.7549908,1.03443650722707
-c26,367.1935,1160.9424,734.4923,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,452.0318,13.092790,401.5683,49.588,38.5238,668.3418,678.3662,49.8165,38.8844,763.2659,686.3864,49.6507,39.5474,226.8183,471.2729,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,10964.5893988,1.38786245636149
-c27,367.1935,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,8.728527,13.092790,422.944,49.588,38.5238,719.1726,678.3662,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,373.8222,326.6018,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,8754.6422018,1.73820261184986
-c28,520.638,1160.9424,17.457054,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,17.457054,1385.115,107.72,72.987,596.759,13.092790,401.5683,49.588,38.5238,13.092790,678.3662,49.8165,38.8844,726.3655,735.4767,49.6507,39.5474,226.8183,471.2729,221.6533,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,501.5035,382.9785,50.3589,22.1017,23.3959,0.5632148,11669.4333568,1.30403435360716
-c29,520.638,1160.9424,1316.8065,109.339,74.5864,930.3708,17.457054,108.541,77.2477,1252.9937,17.457054,107.72,72.987,596.759,648.5595,4.364263,49.588,38.5238,13.092790,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,471.2729,221.6533,49.7509,21.8376,347.1207,475.3594,50.4854,21.963,501.5035,483.9556,50.3589,22.1017,23.3959,0.5632148,12500.4014758,1.21734826057587
-c30,520.638,1160.9424,17.457054,109.339,74.5864,17.457054,17.457054,108.541,77.2477,17.457054,17.457054,107.72,72.987,452.0318,690.021,422.944,49.588,38.5238,719.1726,678.3662,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,373.8222,471.2729,233.5022,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.005455,10209.331925,1.49053259092139
-c31,367.1935,1160.9424,734.4923,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,17.457054,107.72,72.987,596.759,13.092790,4.364263,49.588,38.5238,668.3418,678.3662,49.8165,38.8844,763.2659,686.3864,49.6507,39.5474,226.8183,471.2729,2.182132,49.7509,21.8376,491.0196,475.3594,50.4854,21.963,393.9254,483.9556,50.3589,22.1017,23.3959,0.5632148,12560.8397078,1.21149081969427
-c8,8722.872444
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,207.8389,189.6515,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,4.555456,148.08761,83.6932,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,3.459840,95.8733,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.725589,2667.980987,1.25240053810022
-c2,137.165,207.8389,9.024416,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,259.7649,285.9489,25.4775,16.8139,4.555456,148.08761,83.6932,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,95.8733,50.93214,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,0.003604,2513.348096,1.32945405428417
-c3,208.7505,207.8389,9.024416,25.2021,16.7972,9.024416,198.6494,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,123.27345,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,1.571852,2102.66502,1.58911702924905
-c4,208.7505,342.9757,328.4669,25.2021,16.7972,9.024416,198.6494,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,4.555456,142.4026,99.91022,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2767.065433,1.2075539625465
-c5,208.7505,342.9757,328.4669,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,83.6932,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,3.459840,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2583.744513,1.29323189768369
-c6,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,123.27345,6.804352,83.6932,11.4693,8.57887,6.804352,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,77.39211,5.766400,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.003604,2671.905996,1.25056077157885
-c7,208.7505,342.9757,328.4669,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,51.58494,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2478.012865,1.34841140712111
-c8,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,1.571852,2140.840304,1.56078002954208
-c9,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,123.27345,142.4026,83.6932,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,3.459840,5.766400,48.1179,11.2873,4.46626,100.88243,89.0888,11.1843,4.46968,5.766400,95.9563,11.2081,4.48848,5.19545,0.725589,3111.982111,1.07371466880149
-c10,208.7505,207.8389,189.6515,25.2021,16.7972,356.8393,198.6494,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,123.27345,148.08761,83.6932,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,0.725589,3723.591349,0.897354340497627
-c11,137.165,207.8389,9.024416,25.2021,16.7972,9.024416,359.5265,24.9686,17.3272,259.7649,285.9489,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,89.0888,11.1843,4.46968,100.7067,5.766400,11.2081,4.48848,5.19545,0.003604,2877.820088,1.1610805160562
-c12,208.7505,207.8389,328.4669,25.2021,16.7972,9.024416,198.6494,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,123.27345,6.804352,2.306560,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,5.766400,48.1179,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2996.393429,1.11513421607046
-c13,208.7505,207.8389,9.024416,25.2021,16.7972,9.024416,198.6494,24.9686,17.3272,259.7649,285.9489,25.4775,16.8139,104.7504,6.804352,2.306560,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,3.459840,74.50821,48.1179,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,2501.256808,1.33588074792036
-c14,137.165,342.9757,9.024416,25.2021,16.7972,9.024416,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,4.555456,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,0.003604,3290.311852,1.01552101981363
-c15,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,359.5265,24.9686,17.3272,9.024416,388.4166,25.4775,16.8139,123.27345,142.4026,99.91022,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,95.8733,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,1.571852,2991.287152,1.11703780596996
-c16,208.7505,207.8389,328.4669,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,74.50821,1.153280,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,0.725589,2392.281395,1.39673401979812
-c17,208.7505,342.9757,9.024416,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,9.024416,388.4166,25.4775,16.8139,123.27345,6.804352,2.306560,11.4693,8.57887,6.804352,176.87812,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,95.8733,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.003604,2668.589466,1.25211497173335
-c18,208.7505,342.9757,9.024416,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2590.150709,1.29003335921202
-c19,208.7505,342.9757,9.024416,25.2021,16.7972,208.4406,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,123.27345,148.08761,2.306560,11.4693,8.57887,151.93707,176.87812,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,3349.738715,0.997504920096285
-c20,137.165,207.8389,9.024416,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,104.7504,148.08761,83.6932,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,151.49962,142.1021,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.003604,2930.709778,1.14012682527288
-c21,137.165,207.8389,9.024416,25.2021,16.7972,356.8393,198.6494,24.9686,17.3272,259.7649,285.9489,25.4775,16.8139,123.27345,142.4026,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,3091.544687,1.08081272606839
-c22,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,4.555456,6.804352,83.6932,11.4693,8.57887,151.93707,142.1728,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,0.003604,2149.377542,1.5545806766143
-c23,208.7505,342.9757,328.4669,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2775.719619,1.20378903032897
-c24,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,123.27345,142.4026,83.6932,11.4693,8.57887,151.93707,6.804352,11.2953,8.59732,181.04838,142.1021,11.2352,8.56125,51.58494,5.766400,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2547.256945,1.31175648549438
-c25,208.7505,207.8389,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,285.9489,25.4775,16.8139,123.27345,148.08761,99.91022,11.4693,8.57887,157.84032,142.1728,11.2953,8.59732,6.804352,142.1021,11.2352,8.56125,77.39211,74.50821,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,3423.875657,0.975906015914467
-c26,137.165,342.9757,189.6515,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,104.7504,6.804352,83.6932,11.4693,8.57887,157.84032,142.1728,11.2953,8.59732,181.04838,142.1021,11.2352,8.56125,51.58494,95.8733,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2572.789709,1.2987384112419
-c27,137.165,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,4.555456,6.804352,99.91022,11.4693,8.57887,151.93707,142.1728,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,77.39211,74.50821,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2081.109117,1.60557693066044
-c28,208.7505,342.9757,9.024416,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,9.024416,285.9489,25.4775,16.8139,123.27345,6.804352,83.6932,11.4693,8.57887,6.804352,142.1728,11.2953,8.59732,151.49962,175.99631,11.2352,8.56125,51.58494,95.8733,50.93214,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,100.7067,89.01816,11.2081,4.48848,5.19545,0.725589,2736.116601,1.22121287727924
-c29,208.7505,342.9757,328.4669,25.2021,16.7972,208.4406,9.024416,24.9686,17.3272,259.7649,9.024416,25.4775,16.8139,123.27345,148.08761,2.306560,11.4693,8.57887,6.804352,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,95.8733,50.93214,11.2873,4.46626,80.09278,95.9383,11.1843,4.46968,100.7067,95.9563,11.2081,4.48848,5.19545,0.725589,2991.275573,1.11704212993812
-c30,208.7505,342.9757,9.024416,25.2021,16.7972,9.024416,9.024416,24.9686,17.3272,9.024416,9.024416,25.4775,16.8139,104.7504,142.4026,99.91022,11.4693,8.57887,151.93707,142.1728,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,77.39211,95.8733,48.1179,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.003604,2438.679674,1.37015978261031
-c31,137.165,342.9757,189.6515,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,123.27345,6.804352,2.306560,11.4693,8.57887,157.84032,142.1728,11.2953,8.59732,181.04838,142.1021,11.2352,8.56125,51.58494,95.8733,1.153280,11.2873,4.46626,100.88243,95.9383,11.1843,4.46968,90.82636,95.9563,11.2081,4.48848,5.19545,0.725589,2971.542503,1.12446005170063
-c27,2081.109117
-
-Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,674.9156,734.4923,336.544,74.5864,1021.7777,67.644319,108.541,77.2477,1342.9017,67.644319,107.72,72.987,29.480448,648.5595,442.3803,159.941,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,18.903715,471.2729,291.5164,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.6085782,12207.0880792,1.24659885235606
-c2,367.1935,674.9156,67.644319,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,1342.9017,1385.115,351.059,72.987,29.480448,648.5595,442.3803,159.941,38.5238,42.156294,42.156294,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,471.2729,291.5164,49.7509,21.8376,347.1207,495.6046,120.4748,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,0.08824,11951.575014,1.27324992477138
-c3,533.7291,748.8261,67.644319,109.339,74.5864,67.644319,938.716,368.498,77.2477,67.644319,67.644319,107.72,72.987,639.1792,690.021,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,6.45743,9992.282706,1.52290947026265
-c4,533.7291,1160.9424,1316.8065,109.339,74.5864,67.644319,938.716,368.498,77.2477,67.644319,67.644319,107.72,72.987,29.480448,690.021,558.879,49.588,38.5238,761.6715,796.7183,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,12378.8784952,1.22929892217383
-c5,533.7291,1160.9424,1316.8065,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,401.5683,159.941,38.5238,761.6715,796.7183,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,18.903715,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,11524.3889812,1.32044675058953
-c6,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,639.1792,42.156294,401.5683,159.941,38.5238,42.156294,737.6456,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,393.7426,29.560412,221.6533,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.08824,12331.227597,1.23404923570604
-c7,533.7291,1160.9424,1316.8065,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,226.8183,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,11024.6992602,1.38029542735068
-c8,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,6.45743,9508.819946,1.60033968896081
-c9,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1342.9017,67.644319,107.72,72.987,639.1792,690.021,401.5683,159.941,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,18.903715,29.560412,233.5022,106.94,21.8376,511.0788,413.5184,50.4854,21.963,29.560412,483.9556,121.956,22.1017,23.3959,0.6085782,14008.0356552,1.08632947407713
-c10,533.7291,748.8261,734.4923,336.544,74.5864,1558.1515,1028.1229,368.498,77.2477,1693.1477,1601.9877,107.72,72.987,639.1792,709.4595,442.3803,159.941,38.5238,761.6715,796.7183,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,0.6085782,16942.9772902,0.898150411485638
-c11,367.1935,674.9156,67.644319,109.339,74.5864,67.644319,1543.1258,108.541,77.2477,1342.9017,1385.115,351.059,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,413.5184,50.4854,21.963,521.315,29.560412,50.3589,22.1017,23.3959,0.08824,13018.547684,1.16889705113671
-c12,533.7291,748.8261,1419.8715,109.339,74.5864,67.644319,938.716,368.498,77.2477,1693.1477,1471.0861,351.059,72.987,639.1792,42.156294,21.193653,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,29.560412,233.5022,106.94,21.8376,347.1207,495.6046,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,14006.2311502,1.08646943228091
-c13,533.7291,748.8261,67.644319,109.339,74.5864,67.644319,938.716,368.498,77.2477,1342.9017,1385.115,351.059,72.987,452.0318,42.156294,21.193653,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,18.903715,326.6018,253.3351,106.94,21.8376,347.1207,495.6046,120.4748,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,11878.907124,1.28103888917114
-c14,367.1935,1240.8474,67.644319,109.339,74.5864,67.644319,1543.1258,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,29.480448,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,0.08824,15016.108626,1.0134011675376
-c15,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,1543.1258,108.541,77.2477,67.644319,1601.9877,107.72,72.987,639.1792,690.021,558.879,49.588,38.5238,761.6715,796.7183,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,491.752,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,6.45743,13219.828417,1.15109981156195
-c16,533.7291,748.8261,1419.8715,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,357.7395,10.570936,49.7509,21.8376,347.1207,495.6046,120.4748,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,0.6085782,10529.3203842,1.44523496436781
-c17,533.7291,1160.9424,67.644319,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,67.644319,1601.9877,107.72,72.987,639.1792,42.156294,21.193653,49.588,38.5238,42.156294,737.6456,49.8165,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,471.2729,291.5164,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.08824,11771.641438,1.29271198631703
-c18,533.7291,1160.9424,67.644319,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,1342.9017,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,11732.3618562,1.29703994571684
-c19,533.7291,1160.9424,67.644319,109.339,74.5864,1021.7777,1670.0268,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,639.1792,709.4595,21.193653,49.588,38.5238,761.6715,796.7183,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,15338.3594982,0.992110141725051
-c20,367.1935,674.9156,67.644319,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,452.0318,648.5595,442.3803,159.941,38.5238,668.3418,42.156294,49.8165,38.8844,768.7706,686.3864,175.4817,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,495.6046,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.08824,13594.069987,1.11941030297853
-c21,367.1935,674.9156,67.644319,109.339,74.5864,1558.1515,1028.1229,368.498,77.2477,1342.9017,1385.115,351.059,72.987,639.1792,690.021,558.879,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,14763.9018062,1.03071276221427
-c22,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,29.480448,42.156294,401.5683,159.941,38.5238,761.6715,678.3662,175.6035,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,0.08824,9734.671577,1.56321061662037
-c23,533.7291,1160.9424,1316.8065,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,12231.0555292,1.24415607091759
-c24,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,1693.1477,67.644319,107.72,72.987,639.1792,690.021,401.5683,159.941,38.5238,761.6715,42.156294,49.8165,38.8844,763.2659,728.2224,175.4817,39.5474,226.8183,29.560412,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,11550.9467602,1.31741079748475
-c25,533.7291,748.8261,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1471.0861,351.059,72.987,639.1792,709.4595,422.944,49.588,38.5238,668.3418,720.1492,175.6035,38.8844,42.156294,686.3864,175.4817,39.5474,393.7426,357.7395,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,15844.5487272,0.960414984406291
-c26,367.1935,1240.8474,819.0329,336.544,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,452.0318,42.156294,401.5683,159.941,38.5238,668.3418,720.1492,175.6035,38.8844,763.2659,728.2224,175.4817,39.5474,226.8183,491.752,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,12435.7253652,1.22367948354795
-c27,367.1935,1240.8474,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,29.480448,42.156294,422.944,49.588,38.5238,761.6715,678.3662,175.6035,38.8844,768.7706,794.6234,49.6507,39.5474,393.7426,357.7395,253.3351,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,9715.2799152,1.56633077904
-c28,533.7291,1160.9424,67.644319,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,67.644319,1385.115,351.059,72.987,639.1792,42.156294,401.5683,159.941,38.5238,42.156294,678.3662,175.6035,38.8844,768.7706,794.6234,49.6507,39.5474,226.8183,491.752,291.5164,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,521.315,413.8585,50.3589,22.1017,23.3959,0.6085782,12745.7919232,1.19391106391045
-c29,533.7291,1160.9424,1316.8065,109.339,74.5864,1021.7777,67.644319,108.541,77.2477,1342.9017,67.644319,107.72,72.987,639.1792,709.4595,21.193653,49.588,38.5238,42.156294,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,491.752,291.5164,49.7509,21.8376,347.1207,495.6046,120.4748,21.963,521.315,483.9556,121.956,22.1017,23.3959,0.6085782,13216.4258632,1.1513961608964
-c30,533.7291,1160.9424,67.644319,109.339,74.5864,67.644319,67.644319,108.541,77.2477,67.644319,67.644319,107.72,72.987,452.0318,734.2297,558.879,49.588,38.5238,761.6715,678.3662,175.6035,38.8844,763.2659,735.4767,49.6507,39.5474,393.7426,471.2729,233.5022,106.94,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.08824,11080.999035,1.37328249279752
-c31,367.1935,1240.8474,819.0329,336.544,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,67.644319,107.72,72.987,639.1792,42.156294,21.193653,49.588,38.5238,668.3418,720.1492,175.6035,38.8844,763.2659,728.2224,175.4817,39.5474,226.8183,491.752,10.570936,49.7509,21.8376,511.0788,475.3594,120.4748,21.963,393.9254,504.3277,121.956,22.1017,23.3959,0.6085782,13687.3459992,1.11178178768267
-c8,9508.819946
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,10.859436,0,0,0,0,0,0,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.895404,0
-c2,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,10.859436,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,90.116353,0
-c3,0,0,23.842121,0,0,23.842121,0,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,138.715137,0
-c4,0,0,0,0,0,23.842121,0,0,0,23.842121,23.842121,0,0,10.859436,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,94.1832,0
-c5,0,0,0,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.351726,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,114.517611,0
-c6,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,114.896439,0
-c7,0,0,0,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,122.940511,0
-c8,0,0,23.842121,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,7.351726,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,154.134358,0
-c9,0,0,23.842121,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.351726,11.797401,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,78.63077,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,11.797401,0
-c11,0,0,23.842121,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0.023423,75.279692,0
-c12,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,6.991873,0,0,0,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,74.180647,0
-c13,0,0,23.842121,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,6.991873,0,0,0,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,93.577093,0
-c14,0,0,23.842121,0,0,23.842121,0,0,0,0,0,0,0,10.859436,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,58.567101,0
-c15,0,0,23.842121,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,90.769396,0
-c16,0,0,0,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,114.611517,0
-c17,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,6.991873,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,110.090911,0
-c18,0,0,23.842121,0,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,7.351726,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,106.450116,0
-c19,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,46.099802,0
-c20,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,70.834017,0
-c21,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,46.968473,0
-c22,0,0,23.842121,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,10.859436,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,145.86809,0
-c23,0,0,0,0,0,0,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,87.300989,0
-c24,0,0,23.842121,0,0,23.842121,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,122.940511,0
-c25,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,43.085154,0
-c26,0,0,0,0,0,23.842121,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,90.769396,0
-c27,0,0,23.842121,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,10.859436,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,145.844667,0
-c28,0,0,23.842121,0,0,0,23.842121,0,0,23.842121,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,103.075615,0
-c29,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,0,0,6.991873,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,70.450741,0
-c30,0,0,23.842121,0,0,23.842121,23.842121,0,0,23.842121,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023423,119.234028,0
-c31,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,0,15.774626,6.991873,0,0,0,0,0,0,0,0,0,0,0,0,3.468407,0,0,0,0,0,0,0,0,0,0,0,0,73.919148,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,9.892485,0,0,0,0,0,0,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,67.588367,0
-c2,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,9.892485,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,89.219891,0
-c3,0,0,26.345144,0,0,26.345144,0,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,138.810684,0
-c4,0,0,0,0,0,26.345144,0,0,0,26.345144,26.345144,0,0,9.892485,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,95.780269,0
-c5,0,0,0,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.005594,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,117.238522,0
-c6,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,112.524902,0
-c7,0,0,0,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,125.521806,0
-c8,0,0,26.345144,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,5.005594,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,156.872544,0
-c9,0,0,26.345144,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.005594,6.852352,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,71.400586,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,6.852352,0
-c11,0,0,26.345144,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0.059362,72.89088,0
-c12,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,9.837517,0,0,0,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,69.612769,0
-c13,0,0,26.345144,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,9.837517,0,0,0,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,94.111155,0
-c14,0,0,26.345144,0,0,26.345144,0,0,0,0,0,0,0,9.892485,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,62.642135,0
-c15,0,0,26.345144,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,97.244707,0
-c16,0,0,0,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,123.589851,0
-c17,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,9.837517,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,115.510067,0
-c18,0,0,26.345144,0,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,5.005594,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,104.182256,0
-c19,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,47.95541,0
-c20,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,71.044122,0
-c21,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,44.639616,0
-c22,0,0,26.345144,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,9.892485,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,154.966445,0
-c23,0,0,0,0,0,0,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,92.32431,0
-c24,0,0,26.345144,0,0,26.345144,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,125.521806,0
-c25,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,44.554419,0
-c26,0,0,0,0,0,26.345144,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,97.244707,0
-c27,0,0,26.345144,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,9.892485,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,154.907083,0
-c28,0,0,26.345144,0,0,0,26.345144,0,0,26.345144,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,105.613188,0
-c29,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,0,0,9.837517,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,75.816683,0
-c30,0,0,26.345144,0,0,26.345144,26.345144,0,0,26.345144,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059362,131.785082,0
-c31,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,0,13.288878,9.837517,0,0,0,0,0,0,0,0,0,0,0,0,4.920397,0,0,0,0,0,0,0,0,0,0,0,0,80.73708,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,2.573634,0,0,0,0,0,0,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17.03961,0
-c2,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,2.573634,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,22.691665,0
-c3,0,0,6.518272,0,0,6.518272,0,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,35.212693,0
-c4,0,0,0,0,0,6.518272,0,0,0,6.518272,6.518272,0,0,2.573634,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,24.200223,0
-c5,0,0,0,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.429432,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,29.574293,0
-c6,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,28.708076,0
-c7,0,0,0,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,31.678777,0
-c8,0,0,6.518272,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,1.429432,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,39.626481,0
-c9,0,0,6.518272,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.429432,2.071773,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,18.609522,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,2.071773,0
-c11,0,0,6.518272,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0.013655,18.655888,0
-c12,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,2.308683,0,0,0,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,17.96656,0
-c13,0,0,6.518272,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,2.308683,0,0,0,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.842491,0
-c14,0,0,6.518272,0,0,6.518272,0,0,0,0,0,0,0,2.573634,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,15.623833,0
-c15,0,0,6.518272,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,24.242415,0
-c16,0,0,0,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,30.760687,0
-c17,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,2.308683,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,28.944986,0
-c18,0,0,6.518272,0,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,1.429432,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,26.589937,0
-c19,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,12.052411,0
-c20,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,18.013547,0
-c21,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.48162,0
-c22,0,0,6.518272,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,2.573634,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,38.712565,0
-c23,0,0,0,0,0,0,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,23.088732,0
-c24,0,0,6.518272,0,0,6.518272,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,31.678777,0
-c25,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,11.205871,0
-c26,0,0,0,0,0,6.518272,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,24.242415,0
-c27,0,0,6.518272,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,2.573634,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38.69891,0
-c28,0,0,6.518272,0,0,0,6.518272,0,0,6.518272,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,26.622648,0
-c29,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,0,0,2.308683,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18.879143,0
-c30,0,0,6.518272,0,0,6.518272,6.518272,0,0,6.518272,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013655,32.605015,0
-c31,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,0,3.533916,2.308683,0,0,0,0,0,0,0,0,0,0,0,0,1.153683,0,0,0,0,0,0,0,0,0,0,0,0,20.032826,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,227.205,0,91.4069,0,0,0,89.908,0,0,0,0,0,40.812,110.353,0,0,0,0,0,42.4051,59.1467,0,0,0,0,69.8631,0,0,0,0,0,0,19.8115,30.88,0,0,0,0.0453634,781.8366634,0
-c2,0,0,0,0,0,91.4069,0,0,0,89.908,0,243.339,0,0,0,40.812,110.353,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,0,69.8631,0,0,0,20.2452,69.9894,0,0,0,0,0,0,0,857.3888,0
-c3,13.0911,73.9105,0,0,0,0,0,259.957,0,0,0,0,0,42.4202,0,135.935,0,0,42.4989,0,0,0,0,0,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,806.459,0
-c4,13.0911,0,0,0,0,0,0,259.957,0,0,0,0,0,0,0,135.935,0,0,42.4989,59.0727,0,0,0,0,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,749.8069634,0
-c5,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,44.2087,0,110.353,0,42.4989,59.0727,0,0,42.4051,59.1467,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,610.0284634,0
-c6,13.0911,0,0,0,0,0,0,0,0,0,0,243.339,0,42.4202,0,0,110.353,0,0,0,0,0,42.4051,0,125.831,0,19.9204,0,0,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0,738.0999,0
-c7,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,0,0,42.4051,59.1467,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,576.5377634,0
-c8,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,0,0,0,0,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0,474.9406,0
-c9,13.0911,0,0,0,0,0,0,0,0,89.908,0,0,0,42.4202,0,0,110.353,0,0,0,0,0,42.4051,0,125.831,0,0,0,0,57.1891,0,20.0592,31.2805,0,0,0,0,71.5971,0,0,0.0453634,604.1796634,0
-c10,13.0911,73.9105,0,227.205,0,0,89.4069,259.957,0,0,0,0,0,42.4202,60.9,40.812,110.353,0,42.4989,59.0727,0,0,0,0,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,0,69.9894,0,0,0,0,0,0,0.0453634,1237.8013634,0
-c11,0,0,0,0,0,0,0,0,0,89.908,0,243.339,0,0,0,0,0,0,0,0,0,0,0,0,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,31.2805,0,0,19.8115,0,0,0,0,0,532.4783,0
-c12,13.0911,73.9105,103.065,0,0,0,0,259.957,0,0,85.9711,243.339,0,42.4202,0,0,0,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,0,0,57.1891,0,0,20.2452,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1182.6643634,0
-c13,13.0911,73.9105,0,0,0,0,0,259.957,0,89.908,0,243.339,0,0,0,0,0,0,42.4989,0,0,0,42.4051,59.1467,0,0,0,0,19.8329,57.1891,0,0,20.2452,69.9894,0,0,0,0,0,0,0,991.5129,0
-c14,0,79.905,0,0,0,0,0,0,0,0,85.9711,243.339,0,0,0,0,0,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,820.3042,0
-c15,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,42.4202,0,135.935,0,0,42.4989,59.0727,0,0,0,0,0,0,0,20.4791,0,0,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,494.9542,0
-c16,13.0911,73.9105,103.065,0,0,0,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,0,0,0,0,0,0,19.9204,31.1377,0,0,0,0,20.2452,69.9894,0,0,0,0,0,0,0.0453634,554.0472634,0
-c17,13.0911,0,0,0,0,91.4069,0,0,0,0,0,0,0,42.4202,0,0,0,0,0,0,0,0,42.4051,59.1467,0,0,19.9204,0,69.8631,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0,478.9936,0
-c18,13.0911,0,0,0,0,91.4069,0,0,0,89.908,0,0,0,0,44.2087,135.935,0,0,42.4989,0,0,0,0,0,0,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,656.3008634,0
-c19,13.0911,0,0,0,0,91.4069,126.901,0,0,0,85.9711,243.339,0,42.4202,60.9,0,0,0,42.4989,59.0727,0,0,42.4051,0,125.831,0,0,0,0,0,0,0,0,0,0,0,20.3721,71.5971,0,0,0.0453634,1025.8515634,0
-c20,0,0,0,0,0,0,0,0,0,0,85.9711,243.339,0,0,0,40.812,110.353,0,0,0,0,0,42.4051,0,125.831,0,0,0,0,0,0,0,20.2452,69.9894,0,0,20.3721,71.5971,0,0,0,830.915,0
-c21,0,0,0,0,0,0,89.4069,259.957,0,89.908,0,243.339,0,42.4202,0,135.935,0,0,0,0,0,0,0,0,0,0,0,0,19.8329,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1120.0512634,0
-c22,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,110.353,0,42.4989,0,125.787,0,0,0,0,0,0,0,19.8329,57.1891,0,20.0592,0,69.9894,0,19.8115,0,71.5971,0,0,0,550.2092,0
-c23,13.0911,0,0,0,0,91.4069,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,0,0,0,0,0,0,0,0,19.8329,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,586.2257634,0
-c24,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,42.4202,0,0,110.353,0,42.4989,0,0,0,0,41.836,125.831,0,0,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,615.2824634,0
-c25,13.0911,73.9105,0,0,0,0,0,0,0,0,85.9711,243.339,0,42.4202,60.9,0,0,0,0,41.783,125.787,0,0,0,125.831,0,19.9204,31.1377,0,0,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1046.1541634,0
-c26,0,79.905,84.5406,227.205,0,0,0,0,0,0,0,243.339,0,0,0,0,110.353,0,0,41.783,125.787,0,0,41.836,125.831,0,0,20.4791,0,0,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,1283.1218634,0
-c27,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42.4989,0,125.787,0,42.4051,59.1467,0,0,19.9204,31.1377,19.8329,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,659.8859634,0
-c28,13.0911,0,0,0,0,0,0,0,0,0,0,243.339,0,42.4202,0,0,110.353,0,0,0,125.787,0,42.4051,59.1467,0,0,0,20.4791,69.8631,0,0,20.0592,0,69.9894,0,19.8115,30.88,0,0,0,0.0453634,867.6697634,0
-c29,13.0911,0,0,0,0,91.4069,0,0,0,89.908,0,0,0,42.4202,60.9,0,0,0,0,0,0,0,0,0,0,0,0,20.4791,69.8631,0,0,0,20.2452,69.9894,0,19.8115,0,71.5971,0,0,0.0453634,569.7569634,0
-c30,13.0911,0,0,0,0,0,0,0,0,0,0,0,0,0,44.2087,135.935,0,0,42.4989,0,125.787,0,0,0,0,0,19.9204,0,0,57.1891,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0,620.648,0
-c31,0,79.905,84.5406,227.205,0,0,0,0,0,0,0,0,0,42.4202,0,0,0,0,0,41.783,125.787,0,0,41.836,125.831,0,0,20.4791,0,0,0,20.0592,0,69.9894,0,0,20.3721,71.5971,0,0,0.0453634,971.8500634,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,54.8381,0,20.9916,0,0,0,21.0334,0,0,0,0,0,10.5675,27.8587,0,0,0,0,0,10.5613,14.7899,0,0,0,0,16.9562,0,0,0,0,0,0,5.3629,8.01787,0,0,0,0.182164,191.159634,0
-c2,0,0,0,0,0,20.9916,0,0,0,21.0334,0,60.3765,0,0,0,10.5675,27.8587,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,0,16.9562,0,0,0,5.41437,17.215,0,0,0,0,0,0,0,211.13038,0
-c3,6.3224,20.9352,0,0,0,0,0,62.7404,0,0,0,0,0,10.5944,0,32.0547,0,0,10.5792,0,0,0,0,0,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,203.04757,0
-c4,6.3224,0,0,0,0,0,0,62.7404,0,0,0,0,0,0,0,32.0547,0,0,10.5792,14.881,0,0,0,0,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,186.588244,0
-c5,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,10.6041,0,27.8587,0,10.5792,14.881,0,0,10.5613,14.7899,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,155.607144,0
-c6,6.3224,0,0,0,0,0,0,0,0,0,0,60.3765,0,10.5944,0,0,27.8587,0,0,0,0,0,10.5613,0,31.0414,0,5.36591,0,0,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0,188.07895,0
-c7,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,0,0,10.5613,14.7899,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,144.922144,0
-c8,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,0,0,0,0,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0,119.38878,0
-c9,6.3224,0,0,0,0,0,0,0,0,21.0334,0,0,0,10.5944,0,0,27.8587,0,0,0,0,0,10.5613,0,31.0414,0,0,0,0,14.405,0,5.36257,8.10378,0,0,0,0,17.4758,0,0,0.182164,152.940914,0
-c10,6.3224,20.9352,0,54.8381,0,0,21.0449,62.7404,0,0,0,0,0,10.5944,14.972,10.5675,27.8587,0,10.5792,14.881,0,0,0,0,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,0,17.215,0,0,0,0,0,0,0.182164,311.306614,0
-c11,0,0,0,0,0,0,0,0,0,21.0334,0,60.3765,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,8.10378,0,0,5.3629,0,0,0,0,0,133.45223,0
-c12,6.3224,20.9352,27.8734,0,0,0,0,62.7404,0,0,20.9627,60.3765,0,10.5944,0,0,0,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,0,0,14.405,0,0,5.41437,17.215,0,0,5.37001,17.4758,0,0,0.182164,300.584454,0
-c13,6.3224,20.9352,0,0,0,0,0,62.7404,0,21.0334,0,60.3765,0,0,0,0,0,0,10.5792,0,0,0,10.5613,14.7899,0,0,0,0,5.36951,14.405,0,0,5.41437,17.215,0,0,0,0,0,0,0,249.74218,0
-c14,0,27.627,0,0,0,0,0,0,0,0,20.9627,60.3765,0,0,0,0,0,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,212.94675,0
-c15,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,10.5944,0,32.0547,0,0,10.5792,14.881,0,0,0,0,0,0,0,5.37147,0,0,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,125.21944,0
-c16,6.3224,20.9352,27.8734,0,0,0,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,0,0,0,0,0,0,5.36591,8.07266,0,0,0,0,5.41437,17.215,0,0,0,0,0,0,0.182164,144.619104,0
-c17,6.3224,0,0,0,0,20.9916,0,0,0,0,0,0,0,10.5944,0,0,0,0,0,0,0,0,10.5613,14.7899,0,0,5.36591,0,16.9562,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0,121.54005,0
-c18,6.3224,0,0,0,0,20.9916,0,0,0,21.0334,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,0,0,0,0,0,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,161.595944,0
-c19,6.3224,0,0,0,0,20.9916,29.7996,0,0,0,20.9627,60.3765,0,10.5944,14.972,0,0,0,10.5792,14.881,0,0,10.5613,0,31.0414,0,0,0,0,0,0,0,0,0,0,0,5.37001,17.4758,0,0,0.182164,254.110074,0
-c20,0,0,0,0,0,0,0,0,0,0,20.9627,60.3765,0,0,0,10.5675,27.8587,0,0,0,0,0,10.5613,0,31.0414,0,0,0,0,0,0,0,5.41437,17.215,0,0,5.37001,17.4758,0,0,0,206.84328,0
-c21,0,0,0,0,0,0,21.0449,62.7404,0,21.0334,0,60.3765,0,10.5944,0,32.0547,0,0,0,0,0,0,0,0,0,0,0,0,5.36951,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,273.224354,0
-c22,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.8587,0,10.5792,0,31.2262,0,0,0,0,0,0,0,5.36951,14.405,0,5.36257,0,17.215,0,5.3629,0,17.4758,0,0,0,141.17728,0
-c23,6.3224,0,0,0,0,20.9916,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,0,0,0,0,0,0,0,0,5.36951,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,145.932054,0
-c24,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,10.5944,0,0,27.8587,0,10.5792,0,0,0,0,10.5466,31.0414,0,0,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,156.953244,0
-c25,6.3224,20.9352,0,0,0,0,0,0,0,0,20.9627,60.3765,0,10.5944,14.972,0,0,0,0,10.5947,31.2262,0,0,0,31.0414,0,5.36591,8.07266,0,0,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,266.069614,0
-c26,0,27.627,20.9195,54.8381,0,0,0,0,0,0,0,60.3765,0,0,0,0,27.8587,0,0,10.5947,31.2262,0,0,10.5466,31.0414,0,0,5.37147,0,0,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,326.005714,0
-c27,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.5792,0,31.2262,0,10.5613,14.7899,0,0,5.36591,8.07266,5.36951,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,173.602224,0
-c28,6.3224,0,0,0,0,0,0,0,0,0,0,60.3765,0,10.5944,0,0,27.8587,0,0,0,31.2262,0,10.5613,14.7899,0,0,0,5.37147,16.9562,0,0,5.36257,0,17.215,0,5.3629,8.01787,0,0,0,0.182164,220.197574,0
-c29,6.3224,0,0,0,0,20.9916,0,0,0,21.0334,0,0,0,10.5944,14.972,0,0,0,0,0,0,0,0,0,0,0,0,5.37147,16.9562,0,0,0,5.41437,17.215,0,5.3629,0,17.4758,0,0,0.182164,141.891704,0
-c30,6.3224,0,0,0,0,0,0,0,0,0,0,0,0,0,10.6041,32.0547,0,0,10.5792,0,31.2262,0,0,0,0,0,5.36591,0,0,14.405,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0,155.98089,0
-c31,0,27.627,20.9195,54.8381,0,0,0,0,0,0,0,0,0,10.5944,0,0,0,0,0,10.5947,31.2262,0,0,10.5466,31.0414,0,0,5.37147,0,0,0,5.36257,0,17.215,0,0,5.37001,17.4758,0,0,0.182164,248.364914,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,207.8389,189.6515,80.0402,16.7972,229.4322,15.542688,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,7.12909,148.08761,94.2607,39.328,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,4.889272,95.8733,67.88834,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.907753,2876.180231,1.16174250723641
-c2,137.165,207.8389,15.542688,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,280.7983,285.9489,85.854,16.8139,7.12909,148.08761,94.2607,39.328,8.57887,10.338268,10.338268,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,95.8733,67.88834,11.2873,4.46626,80.09278,101.35267,28.3993,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,0.017259,2747.170141,1.21629919366908
-c3,215.0729,228.7741,15.542688,25.2021,16.7972,15.542688,198.6494,87.709,17.3272,15.542688,15.542688,25.4775,16.8139,133.86785,142.4026,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,1.571852,2340.925283,1.42737610231637
-c4,215.0729,342.9757,328.4669,25.2021,16.7972,15.542688,198.6494,87.709,17.3272,15.542688,15.542688,25.4775,16.8139,7.12909,142.4026,131.96492,11.4693,8.57887,162.51627,191.75912,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2977.8539,1.1220768207575
-c5,215.0729,342.9757,328.4669,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,83.6932,39.328,8.57887,162.51627,191.75912,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,4.889272,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2768.92595,1.20674257407488
-c6,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,133.86785,10.338268,83.6932,39.328,8.57887,10.338268,176.87812,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,82.75802,7.838173,50.93214,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.017259,2888.693022,1.15671025196563
-c7,215.0729,342.9757,328.4669,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,51.58494,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2654.613786,1.2587069504239
-c8,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,1.571852,2299.855565,1.45286549927906
-c9,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,133.86785,142.4026,83.6932,39.328,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,4.889272,7.838173,48.1179,25.6923,4.46626,106.245,97.19258,11.1843,4.46968,7.838173,95.9563,28.6839,4.48848,5.19545,0.907753,3283.532547,1.01761770270591
-c10,215.0729,228.7741,189.6515,80.0402,16.7972,356.8393,219.6943,87.709,17.3272,399.7504,388.4166,25.4775,16.8139,133.86785,163.05961,94.2607,39.328,8.57887,162.51627,191.75912,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,0.907753,4036.969736,0.827695297399294
-c11,137.165,207.8389,15.542688,25.2021,16.7972,15.542688,359.5265,24.9686,17.3272,280.7983,285.9489,85.854,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,97.19258,11.1843,4.46968,106.0696,7.838173,11.2081,4.48848,5.19545,0.017259,3029.928206,1.10279208335829
-c12,215.0729,228.7741,356.3403,25.2021,16.7972,15.542688,198.6494,87.709,17.3272,399.7504,306.9116,85.854,16.8139,133.86785,10.338268,4.615243,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,7.838173,48.1179,25.6923,4.46626,80.09278,101.35267,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3314.944443,1.00797491652035
-c13,215.0729,228.7741,15.542688,25.2021,16.7972,15.542688,198.6494,87.709,17.3272,280.7983,285.9489,85.854,16.8139,104.7504,10.338268,4.615243,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,4.889272,74.50821,53.48741,25.6923,4.46626,80.09278,101.35267,28.3993,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,2774.841479,1.2041699873202
-c14,137.165,370.6027,15.542688,25.2021,16.7972,15.542688,359.5265,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,7.12909,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,0.017259,3518.882435,0.949557399477104
-c15,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,359.5265,24.9686,17.3272,15.542688,388.4166,25.4775,16.8139,133.86785,142.4026,131.96492,11.4693,8.57887,162.51627,191.75912,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,101.24477,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,1.571852,3140.749007,1.06388025122823
-c16,215.0729,228.7741,356.3403,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,82.58087,2.306963,11.2873,4.46626,80.09278,101.35267,28.3993,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,0.907753,2567.661186,1.30133244880026
-c17,215.0729,342.9757,15.542688,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,15.542688,388.4166,25.4775,16.8139,133.86785,10.338268,4.615243,11.4693,8.57887,10.338268,176.87812,11.2953,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,95.8733,67.88834,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.017259,2819.074502,1.18527581591117
-c18,215.0729,342.9757,15.542688,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2778.33659,1.20265515732004
-c19,215.0729,342.9757,15.542688,25.2021,16.7972,229.4322,389.3261,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,133.86785,163.05961,4.615243,11.4693,8.57887,162.51627,191.75912,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3615.9012,0.924079689066734
-c20,137.165,207.8389,15.542688,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,104.7504,148.08761,94.2607,39.328,8.57887,157.84032,10.338268,11.2953,8.59732,162.06092,142.1021,42.2766,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,101.35267,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.017259,3155.566605,1.05888458757838
-c21,137.165,207.8389,15.542688,25.2021,16.7972,356.8393,219.6943,87.709,17.3272,280.7983,285.9489,85.854,16.8139,133.86785,142.4026,131.96492,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3376.250661,0.98967203135419
-c22,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,7.12909,10.338268,83.6932,39.328,8.57887,162.51627,142.1728,42.5215,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,0.017259,2329.267387,1.43452006592148
-c23,215.0729,342.9757,328.4669,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2944.740405,1.13469453193805
-c24,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,133.86785,142.4026,83.6932,39.328,8.57887,162.51627,10.338268,11.2953,8.59732,181.04838,152.6487,42.2766,8.56125,51.58494,7.838173,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2735.888966,1.2213144862212
-c25,215.0729,228.7741,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,306.9116,85.854,16.8139,133.86785,163.05961,99.91022,11.4693,8.57887,157.84032,152.7675,42.5215,8.59732,10.338268,142.1021,42.2766,8.56125,82.75802,82.58087,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3701.151142,0.902795030660355
-c26,137.165,370.6027,210.571,80.0402,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,104.7504,10.338268,83.6932,39.328,8.57887,157.84032,152.7675,42.5215,8.59732,181.04838,152.6487,42.2766,8.56125,51.58494,101.24477,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2923.037838,1.14311925464992
-c27,137.165,370.6027,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,7.12909,10.338268,99.91022,11.4693,8.57887,162.51627,142.1728,42.5215,8.59732,162.06092,190.78621,11.2352,8.56125,82.75802,82.58087,53.48741,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,2293.410251,1.45694857771225
-c28,215.0729,342.9757,15.542688,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,15.542688,285.9489,85.854,16.8139,133.86785,10.338268,83.6932,39.328,8.57887,10.338268,142.1728,42.5215,8.59732,162.06092,190.78621,11.2352,8.56125,51.58494,101.24477,67.88834,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,106.0696,97.03603,11.2081,4.48848,5.19545,0.907753,2982.936823,1.12016480242549
-c29,215.0729,342.9757,328.4669,25.2021,16.7972,229.4322,15.542688,24.9686,17.3272,280.7983,15.542688,25.4775,16.8139,133.86785,163.05961,4.615243,11.4693,8.57887,10.338268,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,101.24477,67.88834,11.2873,4.46626,80.09278,101.35267,28.3993,4.46968,106.0696,95.9563,28.6839,4.48848,5.19545,0.907753,3152.04642,1.06006714298113
-c30,215.0729,342.9757,15.542688,25.2021,16.7972,15.542688,15.542688,24.9686,17.3272,15.542688,15.542688,25.4775,16.8139,104.7504,153.0067,131.96492,11.4693,8.57887,162.51627,142.1728,42.5215,8.59732,181.04838,175.99631,11.2352,8.56125,82.75802,95.8733,48.1179,25.6923,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.017259,2627.265579,1.27180930946877
-c31,137.165,370.6027,210.571,80.0402,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,133.86785,10.338268,4.615243,11.4693,8.57887,157.84032,152.7675,42.5215,8.59732,181.04838,152.6487,42.2766,8.56125,51.58494,101.24477,2.306963,11.2873,4.46626,106.245,95.9383,28.3993,4.46968,90.82636,101.32631,28.6839,4.48848,5.19545,0.907753,3239.940243,1.03130940550161
-c27,2293.410251
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results1.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results1.csv
deleted file mode 100644
index 0631464ab20d0003a9923616f97d3d2fde356614..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results1.csv
+++ /dev/null
@@ -1,418 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c2,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15318.246069,0.993412819386321
-c3,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c4,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c5,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c6,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c7,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c8,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15501.448142,0.98167228488818
-c9,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c10,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14041.133564,1.08376876674963
-c11,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15501.448142,0.98167228488818
-c12,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c13,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15318.246069,0.993412819386321
-c14,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c15,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c16,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c17,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c18,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c19,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15501.448142,0.98167228488818
-c20,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c21,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14041.133564,1.08376876674963
-c22,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13178.986828,1.15466706188693
-c23,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c24,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15318.246069,0.993412819386321
-c25,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c26,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c27,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c28,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16698.047889,0.911324612602897
-c29,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c30,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15501.448142,0.98167228488818
-c31,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16291.35512,0.934074661641317
-c32,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14041.133564,1.08376876674963
-c33,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,17013.73903,0.894414919526276
-c22,13178.986828
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c2,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3734.046766,0.894841727731009
-c3,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c4,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c5,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c6,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c7,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c8,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3773.780294,0.885420082820008
-c9,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c10,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3434.130502,0.9729918096458
-c11,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3773.780294,0.885420082820008
-c12,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c13,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3734.046766,0.894841727731009
-c14,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c15,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c16,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c17,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c18,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c19,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3773.780294,0.885420082820008
-c20,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c21,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3434.130502,0.9729918096458
-c22,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3223.930276,1.03643086521792
-c23,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c24,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3734.046766,0.894841727731009
-c25,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c26,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c27,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c28,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4055.156502,0.823983208774732
-c29,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c30,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3773.780294,0.885420082820008
-c31,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3954.706354,0.844912508138335
-c32,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3434.130502,0.9729918096458
-c33,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4123.898312,0.810248123299309
-c22,3223.930276
-
-Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c2,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15474.92783,0.98335463555209
-c3,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c4,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c5,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c6,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c7,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c8,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15645.895789,0.972609189193103
-c9,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c10,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14229.352837,1.06943317677018
-c11,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15645.895789,0.972609189193103
-c12,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c13,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15474.92783,0.98335463555209
-c14,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c15,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c16,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c17,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c18,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c19,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15645.895789,0.972609189193103
-c20,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c21,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14229.352837,1.06943317677018
-c22,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13388.329862,1.1366124197709
-c23,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c24,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15474.92783,0.98335463555209
-c25,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c26,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c27,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c28,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16796.602642,0.905977378196184
-c29,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c30,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15645.895789,0.972609189193103
-c31,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16400.323624,0.927868398873808
-c32,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14229.352837,1.06943317677018
-c33,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,17093.64403,0.890233937191484
-c22,13388.329862
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,43.346653,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c10,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,43.346653,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c21,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c22,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,63.458868,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,43.346653,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0
-c32,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,33.430108,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c10,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,33.430108,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c21,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c22,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,65.979166,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,33.430108,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0
-c32,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,9.139605,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c10,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,9.139605,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c21,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c22,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.57046,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,9.139605,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0
-c32,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c2,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c3,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c4,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c5,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c6,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c7,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c8,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c9,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c10,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c11,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c12,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c13,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c14,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c15,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c16,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c17,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c18,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c19,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c20,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c21,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c22,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c23,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c24,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c25,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c26,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c27,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c28,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c29,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c30,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c31,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c32,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c33,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c2,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c3,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c4,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c5,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c6,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c7,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c8,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c9,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c10,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c11,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c12,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c13,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c14,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c15,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c16,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c17,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c18,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c19,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c20,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c21,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c22,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c23,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c24,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c25,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c26,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c27,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c28,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c29,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c30,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c31,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c32,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c33,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c2,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3770.813371,0.886116742368029
-c3,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c4,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c5,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c6,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c7,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c8,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3809.321666,0.877159020491103
-c9,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c10,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3475.343606,0.961453378907899
-c11,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3809.321666,0.877159020491103
-c12,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c13,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3770.813371,0.886116742368029
-c14,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c15,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c16,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c17,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c18,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c19,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3809.321666,0.877159020491103
-c20,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c21,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3475.343606,0.961453378907899
-c22,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3268.127736,1.02241439646059
-c23,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c24,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3770.813371,0.886116742368029
-c25,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c26,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c27,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c28,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4084.855275,0.817992472744522
-c29,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c30,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3809.321666,0.877159020491103
-c31,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3985.86727,0.838307108296983
-c32,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3475.343606,0.961453378907899
-c33,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4151.525312,0.804856195590596
-c22,3268.127736
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results2.csv b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results2.csv
deleted file mode 100644
index abf09ee84725703f377f9ee103c0c119469018aa..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_results2.csv
+++ /dev/null
@@ -1,550 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c2,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c3,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16595.159293,0.916974748746241
-c4,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13644.186023,1.11529863178487
-c5,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,13.092790,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13539.421255,1.12392854286792
-c6,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,13297.738482,1.14435563768703
-c7,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14992.005674,1.01503043316531
-c8,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13829.134659,1.10038280629933
-c9,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,15057.140543,1.01063956800287
-c10,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14267.452864,1.06657734587924
-c11,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15281.976974,0.995770510655328
-c12,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14676.314533,1.03686398767873
-c13,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14849.218642,1.02479075695469
-c14,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15473.658942,0.983435273679982
-c15,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c16,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c17,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c18,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13644.186023,1.11529863178487
-c19,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13829.134659,1.10038280629933
-c20,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14849.218642,1.02479075695469
-c21,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,13297.738482,1.14435563768703
-c22,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,15057.140543,1.01063956800287
-c23,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14267.452864,1.06657734587924
-c24,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15281.976974,0.995770510655328
-c25,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14676.314533,1.03686398767873
-c26,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15473.658942,0.983435273679982
-c27,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,13.092790,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,12056.745877,1.26214337963405
-c28,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,13.092790,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13539.421255,1.12392854286792
-c29,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14992.005674,1.01503043316531
-c30,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c31,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c32,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16595.159293,0.916974748746241
-c33,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13644.186023,1.11529863178487
-c34,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c35,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,13.092790,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15473.658942,0.983435273679982
-c36,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13975.779396,1.08883673494959
-c37,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14992.005674,1.01503043316531
-c38,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,13297.738482,1.14435563768703
-c39,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,13.092790,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13539.421255,1.12392854286792
-c40,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,10.910659,50.3589,22.1017,23.3959,6.45743,15057.140543,1.01063956800287
-c41,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,13.092790,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14267.452864,1.06657734587924
-c42,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,17.457054,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,13.092790,13.092790,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13829.134659,1.10038280629933
-c43,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,326.6018,2.182132,49.7509,21.8376,347.1207,10.910659,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15281.976974,0.995770510655328
-c44,367.1935,1160.9424,17.457054,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,13.092790,49.6507,39.5474,226.8183,10.910659,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14676.314533,1.03686398767873
-c45,367.1935,1160.9424,1316.8065,109.339,74.5864,1558.1515,17.457054,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,4.364263,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,6.546395,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14849.218642,1.02479075695469
-c27,12056.745877
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c2,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c3,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4026.294652,0.829889800626301
-c4,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3346.637842,0.998429171876042
-c5,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,6.804352,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3329.819752,1.00347198873028
-c6,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3267.556342,1.02259318494123
-c7,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3635.26387,0.91915772185314
-c8,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3375.271292,0.989959194664961
-c9,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3661.254368,0.912632808837586
-c10,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.190102,0.964270575124984
-c11,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3724.001434,0.897255524331371
-c12,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3566.52206,0.936873738364772
-c13,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3627.667468,0.921082454873936
-c14,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3768.728224,0.886607009510723
-c15,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c16,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c17,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c18,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3346.637842,0.998429171876042
-c19,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3375.271292,0.989959194664961
-c20,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3627.667468,0.921082454873936
-c21,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3267.556342,1.02259318494123
-c22,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3661.254368,0.912632808837586
-c23,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.190102,0.964270575124984
-c24,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3724.001434,0.897255524331371
-c25,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3566.52206,0.936873738364772
-c26,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3768.728224,0.886607009510723
-c27,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,6.804352,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,2966.434488,1.12639630164668
-c28,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,6.804352,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3329.819752,1.00347198873028
-c29,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3635.26387,0.91915772185314
-c30,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c31,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c32,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4026.294652,0.829889800626301
-c33,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3346.637842,0.998429171876042
-c34,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c35,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,6.804352,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3768.728224,0.886607009510723
-c36,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3423.27821,0.976076335727433
-c37,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3635.26387,0.91915772185314
-c38,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3267.556342,1.02259318494123
-c39,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,6.804352,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3329.819752,1.00347198873028
-c40,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,5.766400,11.2081,4.48848,5.19545,1.571852,3661.254368,0.912632808837586
-c41,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,6.804352,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.190102,0.964270575124984
-c42,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,9.024416,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,6.804352,6.804352,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3375.271292,0.989959194664961
-c43,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,74.50821,1.153280,11.2873,4.46626,80.09278,5.766400,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3724.001434,0.897255524331371
-c44,137.165,342.9757,9.024416,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,6.804352,11.2352,8.56125,51.58494,5.766400,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3566.52206,0.936873738364772
-c45,137.165,342.9757,328.4669,25.2021,16.7972,356.8393,9.024416,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,2.306560,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,3.459840,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3627.667468,0.921082454873936
-c27,2966.434488
-
-Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,367.1935,674.9156,734.4923,131.604,78.945,930.3708,938.716,154.555,92.4142,1252.9937,1385.115,157.253,93.1857,596.759,690.021,401.5683,85.0395,42.293,719.1726,678.3662,85.9011,43.1252,726.3655,686.3864,86.3285,43.6635,373.8222,471.2729,233.5022,67.2674,20.4251,491.0196,475.3594,71.0934,21.0221,501.5035,483.9556,73.1106,21.5459,35.1354,0.5632148,15217.3421148,0.99999999342855
-c1,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c2,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c3,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16691.893683,0.911660612787881
-c4,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13851.055049,1.0986413634992
-c5,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,42.156294,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13754.679085,1.10633929807647
-c6,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,13523.257261,1.12527194510737
-c7,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15151.161443,1.00436801968035
-c8,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14029.711252,1.08465111882938
-c9,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,15205.882561,1.00075361977042
-c10,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14455.672137,1.05269003511649
-c11,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15434.813425,0.98591033122313
-c12,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14854.120055,1.02445260681951
-c13,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15008.497617,1.01391507676104
-c14,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15618.106589,0.974339746668381
-c15,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c16,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c17,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c18,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13851.055049,1.0986413634992
-c19,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14029.711252,1.08465111882938
-c20,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15008.497617,1.01391507676104
-c21,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,13523.257261,1.12527194510737
-c22,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,15205.882561,1.00075361977042
-c23,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14455.672137,1.05269003511649
-c24,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15434.813425,0.98591033122313
-c25,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14854.120055,1.02445260681951
-c26,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15618.106589,0.974339746668381
-c27,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,42.156294,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,12313.802168,1.23579555555683
-c28,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,42.156294,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13754.679085,1.10633929807647
-c29,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15151.161443,1.00436801968035
-c30,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c31,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c32,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,16691.893683,0.911660612787881
-c33,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13851.055049,1.0986413634992
-c34,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c35,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,42.156294,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15618.106589,0.974339746668381
-c36,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14170.414308,1.07388123428549
-c37,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15151.161443,1.00436801968035
-c38,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,13523.257261,1.12527194510737
-c39,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,42.156294,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,13754.679085,1.10633929807647
-c40,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,29.560412,50.3589,22.1017,23.3959,6.45743,15205.882561,1.00075361977042
-c41,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,42.156294,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14455.672137,1.05269003511649
-c42,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,67.644319,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,42.156294,42.156294,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14029.711252,1.08465111882938
-c43,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,326.6018,10.570936,49.7509,21.8376,347.1207,29.560412,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15434.813425,0.98591033122313
-c44,367.1935,1240.8474,67.644319,109.339,74.5864,1558.1515,1543.1258,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,422.944,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,42.156294,49.6507,39.5474,226.8183,29.560412,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,14854.120055,1.02445260681951
-c45,367.1935,1240.8474,1316.8065,109.339,74.5864,1558.1515,67.644319,108.541,77.2477,1693.1477,1601.9877,107.72,72.987,452.0318,648.5595,21.193653,49.588,38.5238,668.3418,737.6456,49.8165,38.8844,763.2659,735.4767,49.6507,39.5474,18.903715,326.6018,221.6533,49.7509,21.8376,347.1207,382.2379,50.4854,21.963,393.9254,382.9785,50.3589,22.1017,23.3959,6.45743,15008.497617,1.01391507676104
-c27,12313.802168
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c2,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0
-c4,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,67.188774,0
-c5,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,70.657181,0
-c6,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,11.797401,0,0,0,0,78.986175,0
-c7,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.616747,0
-c8,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62.743099,0
-c9,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,35.639522,0
-c10,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,38.032307,0
-c12,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,51.414148,0
-c13,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38.18572,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c15,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c16,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c17,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c18,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,67.188774,0
-c19,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62.743099,0
-c20,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38.18572,0
-c21,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,11.797401,0,0,0,0,78.986175,0
-c22,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,35.639522,0
-c23,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,38.032307,0
-c25,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,51.414148,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c27,0,0,0,0,0,0,23.842121,0,0,0,23.842121,0,0,0,0,0,0,0,0,15.774626,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,91.030895,0
-c28,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,70.657181,0
-c29,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.616747,0
-c30,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c31,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0
-c33,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,67.188774,0
-c34,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,34.5639,0
-c36,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,0,0,0,0,58.406021,0
-c37,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.616747,0
-c38,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,11.797401,0,0,0,11.797401,0,0,0,0,78.986175,0
-c39,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,0,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,70.657181,0
-c40,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.797401,0,0,0,0,35.639522,0
-c41,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,15.774626,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,55.391373,0
-c42,0,0,0,0,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,15.774626,15.774626,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,62.743099,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,15.774626,0,0,0,0,3.468407,0,0,0,11.797401,0,0,0,0,0,0,0,0,38.032307,0
-c44,0,0,23.842121,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.774626,0,0,0,11.797401,0,0,0,0,0,0,0,0,0,0,0,0,0,51.414148,0
-c45,0,0,0,0,0,0,23.842121,0,0,0,0,0,0,0,0,6.991873,0,0,0,0,0,0,0,0,0,0,7.351726,0,0,0,0,0,0,0,0,0,0,0,0,0,0,38.18572,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c2,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0
-c4,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,59.775252,0
-c5,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,64.695649,0
-c6,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,6.852352,0,0,0,0,66.627604,0
-c7,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.634022,0
-c8,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,57.928494,0
-c9,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,33.197496,0
-c10,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,34.899144,0
-c12,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,46.486374,0
-c13,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.188255,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c15,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c16,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c17,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c18,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,59.775252,0
-c19,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,57.928494,0
-c20,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.188255,0
-c21,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,6.852352,0,0,0,0,66.627604,0
-c22,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,33.197496,0
-c23,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,34.899144,0
-c25,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,46.486374,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c27,0,0,0,0,0,0,26.345144,0,0,0,26.345144,0,0,0,0,0,0,0,0,13.288878,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,86.120396,0
-c28,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,64.695649,0
-c29,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.634022,0
-c30,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c31,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0
-c33,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,59.775252,0
-c34,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,29.978747,0
-c36,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,0,0,0,0,56.323891,0
-c37,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,39.634022,0
-c38,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,6.852352,0,0,0,6.852352,0,0,0,0,66.627604,0
-c39,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,0,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,64.695649,0
-c40,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.852352,0,0,0,0,33.197496,0
-c41,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,13.288878,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,52.9229,0
-c42,0,0,0,0,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,13.288878,13.288878,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,57.928494,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,13.288878,0,0,0,0,4.920397,0,0,0,6.852352,0,0,0,0,0,0,0,0,34.899144,0
-c44,0,0,26.345144,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.288878,0,0,0,6.852352,0,0,0,0,0,0,0,0,0,0,0,0,0,46.486374,0
-c45,0,0,0,0,0,0,26.345144,0,0,0,0,0,0,0,0,9.837517,0,0,0,0,0,0,0,0,0,0,5.005594,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41.188255,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c2,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0
-c4,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,15.657877,0
-c5,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,16.81156,0
-c6,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,2.071773,0,0,0,0,17.72965,0
-c7,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.052188,0
-c8,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.015536,0
-c9,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,8.590045,0
-c10,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,9.068055,0
-c12,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,12.123961,0
-c13,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.256387,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c15,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c16,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c17,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c18,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,15.657877,0
-c19,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.015536,0
-c20,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.256387,0
-c21,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,2.071773,0,0,0,0,17.72965,0
-c22,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,8.590045,0
-c23,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,9.068055,0
-c25,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,12.123961,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c27,0,0,0,0,0,0,6.518272,0,0,0,6.518272,0,0,0,0,0,0,0,0,3.533916,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,22.176149,0
-c28,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,16.81156,0
-c29,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.052188,0
-c30,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c31,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0
-c33,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,15.657877,0
-c34,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,7.914372,0
-c36,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,0,0,0,0,14.432644,0
-c37,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.052188,0
-c38,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,2.071773,0,0,0,2.071773,0,0,0,0,17.72965,0
-c39,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,0,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,16.81156,0
-c40,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.071773,0,0,0,0,8.590045,0
-c41,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,3.533916,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.586104,0
-c42,0,0,0,0,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,3.533916,3.533916,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.015536,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,3.533916,0,0,0,0,1.153683,0,0,0,2.071773,0,0,0,0,0,0,0,0,9.068055,0
-c44,0,0,6.518272,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.533916,0,0,0,2.071773,0,0,0,0,0,0,0,0,0,0,0,0,0,12.123961,0
-c45,0,0,0,0,0,0,6.518272,0,0,0,0,0,0,0,0,2.308683,0,0,0,0,0,0,0,0,0,0,1.429432,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.256387,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c2,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c3,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c4,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c5,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c6,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c7,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c8,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c9,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c10,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c11,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c12,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c13,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c14,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c15,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c16,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c17,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c18,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c19,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c20,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c21,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c22,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c23,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c24,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c25,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c26,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c27,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c28,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c29,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c30,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c31,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c32,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c33,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c34,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c35,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c36,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c37,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c38,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c39,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c40,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c41,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c42,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c43,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c44,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c45,0,79.905,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,79.905,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c2,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c3,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c4,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c5,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c6,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c7,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c8,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c9,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c10,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c11,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c12,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c13,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c14,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c15,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c16,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c17,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c18,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c19,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c20,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c21,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c22,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c23,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c24,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c25,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c26,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c27,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c28,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c29,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c30,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c31,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c32,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c33,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c34,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c35,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c36,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c37,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c38,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c39,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c40,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c41,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c42,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c43,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c44,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c45,0,27.627,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,27.627,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,137.165,207.8389,189.6515,31.9722,19.0941,208.4406,198.6494,31.9182,19.1354,259.7649,285.9489,32.0305,19.0951,123.27345,142.4026,83.6932,18.2347,9.39785,151.93707,142.1728,18.074,9.41748,151.49962,142.1021,18.0244,9.40974,77.39211,95.8733,48.1179,14.1964,4.87483,100.88243,95.9383,14.534,4.81198,100.7067,95.9563,14.737,4.81593,7.47447,0.725589,3341.380949,0.999999970072255
-c1,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c2,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c3,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4056.230335,0.823765070191334
-c4,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3389.922719,0.985680538291925
-c5,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,10.338268,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3374.258312,0.990256388519896
-c6,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3312.912992,1.00859299843052
-c7,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3672.943058,0.909728467134639
-c8,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3417.913828,0.977608277852455
-c9,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3697.471413,0.903693493581218
-c10,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3506.403206,0.952936857914313
-c11,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3760.696489,0.888500539706794
-c12,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3606.273021,0.926546835718714
-c13,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3665.550855,0.911563088337972
-c14,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3804.269596,0.87832388763428
-c15,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c16,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c17,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c18,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3389.922719,0.985680538291925
-c19,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3417.913828,0.977608277852455
-c20,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3665.550855,0.911563088337972
-c21,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3312.912992,1.00859299843052
-c22,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3697.471413,0.903693493581218
-c23,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3506.403206,0.952936857914313
-c24,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3760.696489,0.888500539706794
-c25,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3606.273021,0.926546835718714
-c26,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3804.269596,0.87832388763428
-c27,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,10.338268,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3016.237637,1.10779760759952
-c28,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,10.338268,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3374.258312,0.990256388519896
-c29,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3672.943058,0.909728467134639
-c30,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c31,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c32,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,4056.230335,0.823765070191334
-c33,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3389.922719,0.985680538291925
-c34,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c35,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,10.338268,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3804.269596,0.87832388763428
-c36,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3465.337854,0.96422946141316
-c37,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3672.943058,0.909728467134639
-c38,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3312.912992,1.00859299843052
-c39,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,10.338268,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3374.258312,0.990256388519896
-c40,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,7.838173,11.2081,4.48848,5.19545,1.571852,3697.471413,0.903693493581218
-c41,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,10.338268,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3506.403206,0.952936857914313
-c42,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,15.542688,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,10.338268,10.338268,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3417.913828,0.977608277852455
-c43,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,74.50821,2.306963,11.2873,4.46626,80.09278,7.838173,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3760.696489,0.888500539706794
-c44,137.165,370.6027,15.542688,25.2021,16.7972,356.8393,359.5265,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,99.91022,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,10.338268,11.2352,8.56125,51.58494,7.838173,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3606.273021,0.926546835718714
-c45,137.165,370.6027,328.4669,25.2021,16.7972,356.8393,15.542688,24.9686,17.3272,399.7504,388.4166,25.4775,16.8139,104.7504,148.08761,4.615243,11.4693,8.57887,157.84032,176.87812,11.2953,8.59732,181.04838,175.99631,11.2352,8.56125,4.889272,74.50821,50.93214,11.2873,4.46626,80.09278,89.0888,11.1843,4.46968,90.82636,89.01816,11.2081,4.48848,5.19545,1.571852,3665.550855,0.911563088337972
-c27,3016.237637
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,NML1,NML2,Conv4,Conv5,NML3,NML4,Conv6,Conv7,NML5,NML6,Conv8,Conv9,Conv10,NML7,NML8,Conv11,Conv12,NML9,NML10,Conv13,Conv14,NML11,NML12,Conv15,Conv16,Conv17,NML13,NML14,Conv18,Conv19,NML15,NML16,Conv20,Conv21,NML17,NML18,NML19,FC1,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_tensors.txt b/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_tensors.txt
deleted file mode 100644
index a68f03802ad3eb3236f0eaf1a2dbe4eb524d712a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/resnet18_cifar10/resnet18_tensors.txt
+++ /dev/null
@@ -1,114 +0,0 @@
-#Conv1,3
-Conv1,96.2953,255.037,174.622,421.265,6.3224,13.0911,20.9136,52.8942
-Add1,21.6934,58.7548,18.4067,51.7458,27.7102,72.282,20.7976,56.1939
-Relu1,19.1763,53.4017,15.7218,47.6272,27.4697,75.5402,20.8432,59.4527
-#Conv2,3
-Conv2,167.13,533.192,307.782,1024.12,27.627,79.905,20.9352,73.9105
-Add2,21.6463,74.901,19.5015,74.6206,27.9959,99.3789,21.8283,78.8418
-Relu2,19.0626,66.8226,15.6922,62.2018,27.4183,99.9831,20.833,77.2117
-#Conv3,2
-Conv3,167.964,647.325,309.083,1232.59,27.8734,103.065,20.9195,84.5406
-Add3,21.6875,87.1673,19.3839,84.2165,27.9995,113.419,21.828,88.7986
-#NML1,1
-Add4,31.9722,131.604,25.2021,109.339,54.8381,227.205,20.8082,85.9608
-#NML2,1
-Relu3,19.0941,78.945,16.7972,74.5864,27.5363,113.932,21.969,91.4346
-#Conv4,3
-Conv4,167.592,744.048,322.719,1401.3,27.8507,115.914,20.9916,91.4069
-Add5,21.6558,98.7438,18.3902,84.3374,52.5577,223.355,21.3908,91.48
-Relu4,19.1928,87.579,15.7301,72.5141,27.4947,117.455,20.8274,89.0752
-#Conv5,2
-Conv5,177.005,835.874,342.079,1466.82,29.7996,126.901,21.0449,89.4069
-Add6,21.6444,102.842,17.4475,76.3058,78.6158,330.238,20.8524,85.6316
-#NML3,1
-Add7,31.9182,154.555,24.9686,108.541,62.7404,259.957,20.8101,86.1797
-#NML4,1
-Relu5,19.1354,92.4142,17.3272,77.2477,30.181,124.467,21.9931,91.2
-#Conv6,3
-Conv6,218.988,1050.1,366.167,1544.8,29.7189,123.016,21.0334,89.908
-Add8,21.6322,107.553,17.6899,77.5743,83.3626,352.496,20.8989,85.9449
-Relu6,19.1447,95.3407,15.8935,70.7734,29.4192,120.534,20.9166,86.1995
-#Conv7,2
-Conv7,264.136,1280.25,370.721,1526.79,28.6144,117.921,20.9627,85.9711
-Add9,21.8129,104.865,17.6956,75.1977,82.8254,337.55,20.8517,82.5853
-#NML5,1
-Add10,32.0305,157.253,25.4775,107.72,60.3765,243.339,20.7649,83.3467
-#NML6,1
-Relu7,19.0951,93.1857,16.8139,72.987,28.3017,113.854,22.041,89.2722
-#Conv8,3
-Conv8,100.507,490.584,85.7697,364.625,28.2226,114.351,10.5944,42.4202
-Add11,13.2849,62.5018,10.3212,47.2214,15.502,62.8822,11.2293,44.7914
-Relu8,9.48155,43.6732,8.6595,40.1854,15.0619,61.1893,10.7582,43.1574
-#Conv9,2
-Conv9,129.091,627.118,138.631,604.548,14.972,60.9,10.6041,44.2087
-Add12,13.3116,62.903,9.45661,44.0115,40.1768,172.415,10.4892,42.8418
-#Conv10,2
-Conv10,70.4671,340.841,90.3128,381.213,32.0547,135.935,10.5675,40.812
-Add13,13.2261,60.7273,9.59742,41.731,41.9408,167.231,10.4872,39.7994
-#NML7,1
-Add14,18.2347,85.0395,11.4693,49.588,27.8587,110.353,10.4665,40.3814
-#NML8,1
-Relu9,9.39785,42.293,8.57887,38.5238,14.4553,55.9886,11.1054,43.0539
-#Conv11,3
-Conv11,129.334,614.475,140.577,589.922,15.1273,59.0802,10.5792,42.4989
-Add15,13.201,61.5741,9.39457,42.3399,40.147,165.995,10.4569,41.0957
-Relu10,9.40207,43.1235,7.86875,36.0799,14.7797,59.1119,10.4938,41.2344
-#Conv12,2
-Conv12,129.068,617.019,167.515,695.778,14.881,59.0727,10.5947,41.783
-Add16,13.1048,61.3472,9.36312,41.8676,40.662,165.398,10.4433,40.5802
-#NML9,1
-Add17,18.074,85.9011,11.2953,49.8165,31.2262,125.787,10.4534,40.67
-#NML10,1
-Relu11,9.41748,43.1252,8.59732,38.8844,14.7683,58.4599,11.0992,43.4074
-#Conv13,3
-Conv13,129.135,621.204,163.856,684.766,14.6929,58.143,10.5613,42.4051
-Add18,13.0014,61.4375,9.33772,41.9974,40.52,167.742,10.4893,41.2948
-Relu12,9.36322,43.724,7.85466,36.5025,14.3058,56.635,10.4801,41.3643
-#Conv14,2
-Conv14,129.174,625.346,166.656,693.932,14.7899,59.1467,10.5466,41.836
-Add19,12.9281,61.0404,9.34031,41.5447,40.5583,165.866,10.4607,40.9009
-#NML11,1
-Add20,18.0244,86.3285,11.2352,49.6507,31.0414,125.831,10.4493,41.12
-#NML12,1
-Relu13,9.40974,43.6635,8.56125,39.5474,14.7184,58.0328,11.0576,43.5721
-#Conv15,3
-Conv15,61.1393,299.396,38.244,164.048,14.5914,58.2375,5.36591,19.9204
-Add21,11.443,53.5959,9.3646,42.9504,8.53298,33.0218,5.65061,21.0508
-Relu14,4.80981,20.8303,3.97634,19.8199,7.82028,30.3544,5.27702,19.5391
-#Conv16,2
-Conv16,84.5931,417.41,65.7442,285.597,8.07266,31.1377,5.37147,20.4791
-Add22,11.2802,53.8629,8.76401,41.0048,20.0626,83.8331,5.28191,19.8187
-#Conv17,2
-Conv17,36.7136,180.084,42.064,181.248,16.9562,69.8631,5.36951,19.8329
-Add23,11.4043,53.4182,8.86814,40.4053,20.3149,81.1815,5.29007,18.9716
-#NML13,1
-Add24,14.1964,67.2674,11.2873,49.7509,14.405,57.1891,5.33388,19.3044
-#NML14,1
-Relu15,4.87483,20.4251,4.46626,21.8376,7.89618,29.6604,5.52563,20.1041
-#Conv18,3
-Conv18,84.6165,415.961,67.3709,286.812,8.03381,30.3417,5.36257,20.0592
-Add25,11.4019,54.4409,8.75476,40.2434,20.1161,82.2092,5.27612,19.5688
-Relu16,4.86403,20.6177,3.96712,20.0653,7.75796,29.607,5.27422,19.6466
-#Conv19,2
-Conv19,84.5493,420.587,80.3214,341.92,8.10378,31.2805,5.41437,20.2452
-Add26,11.389,54.7724,8.7674,40.3179,20.1714,82.6833,5.2755,19.4721
-#NML15,1
-Add27,14.534,71.0934,11.1843,50.4854,17.215,69.9894,5.28841,19.6402
-#NML16,1
-Relu17,4.81198,21.0221,4.46968,21.963,8.1633,31.6672,5.53729,20.6582
-#Conv20,3
-Conv20,84.5033,424.692,78.1278,333.871,8.0013,30.8855,5.3629,19.8115
-Add28,11.3928,55.6158,8.73735,40.0918,20.0577,82.5983,5.33532,19.8316
-Relu18,4.8106,21.1957,3.96121,19.9626,7.75018,29.9637,5.25633,19.6445
-#Conv21,2
-Conv21,84.5829,428.664,80.2786,343.147,8.01787,30.88,5.37001,20.3721
-Add29,11.3734,55.2916,8.73956,39.8315,19.9302,82.4415,5.31975,19.701
-#NML17,1
-Add30,14.737,73.1106,11.2081,50.3589,17.4758,71.5971,5.27643,19.4931
-#NML18,1
-Relu19,4.81593,21.5459,4.48848,22.1017,8.14713,31.7315,5.53758,20.7902
-#NML19,1
-Pool1,7.47447,35.1354,5.19545,23.3959,7.76594,29.7619,0.247834,0.0992326
-#FC1,2
-Mul1,0.500934,0.486919,1.17541,5.23179,0.866649,1.15065,0.182164,0.0453634
-Add31,0.224655,0.0762958,0.396442,1.22564,0.809076,1.36364,0.129454,0.0344863
diff --git a/llvm/projects/soc_simulator/src/Makefile b/llvm/projects/soc_simulator/src/Makefile
deleted file mode 100644
index c31b4d295f0bffb35da63e965fc9cb1d8163af99..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/Makefile
+++ /dev/null
@@ -1,16 +0,0 @@
-all: clean timing quant patch gemm
-
-timing:
-	g++ -std=c++11 -O3 promise_timing_model.cpp -o ptm
-
-quant:
-	nvcc -std=c++11 -O3 -arch=sm_62 quantization.cu -o quantize
-
-patch:
-	nvcc -std=c++11 -O3 -arch=sm_62 patch.cu -o patch
-
-gemm:
-	nvcc -std=c++11 -O3 -arch=sm_62 gemm.cu fp16_emu.cpp -o gemm -lcublas
-
-clean:
-	rm -rf ptm quantize patch gemm
diff --git a/llvm/projects/soc_simulator/src/copy_tensor_data.pl b/llvm/projects/soc_simulator/src/copy_tensor_data.pl
deleted file mode 100755
index 98729936071d49bf95017c429e202f107babdae9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/copy_tensor_data.pl
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my %tensors;
-
-########################################
-if (($#ARGV + 1) != 4) {
-    print "Usage: copy_tensor_data.pl <tensor info> <FP16 data> <FP32 data> <output tensor file>\n";
-    exit;
-}
-
-# Input tensor file
-my $tensor_filename = $ARGV[0];
-
-# FP16 data file
-my $fp16_data_filename = $ARGV[1];
-
-# FP32 data file
-my $fp32_data_filename = $ARGV[2];
-
-# Output tensor file
-my $output_filename = $ARGV[3];
-########################################
-
-########################################
-print "Reading FP16 data\n";
-open(my $fp16_data_file, '<', $fp16_data_filename) or die "Couldn't open FP16 data file $fp16_data_filename: $!";
-while (my $line = <$fp16_data_file>) {
-    chomp $line;
-    my @tokens = split /,/, $line;
-
-    # The format of each line is:
-    # 0        1     2
-    # Op name, time, energy
-    my $op_name = $tokens[0];
-    my $op_time = $tokens[1];
-    my $op_energy = $tokens[2];
-
-    if (is_f2h($op_name)) {
-        # Remove _f2h
-        $op_name = substr($op_name, 0, -4);
-        $tensors{$op_name}{"f2h_time"} = $op_time;
-        $tensors{$op_name}{"f2h_energy"} = $op_energy;
-    } elsif (is_h2f($op_name)) {
-        # Remove _h2f
-        $op_name = substr($op_name, 0, -4);
-        $tensors{$op_name}{"h2f_time"} = $op_time;
-        $tensors{$op_name}{"h2f_energy"} = $op_energy;
-    } else {
-        $tensors{$op_name}{"fp16_time"} = $op_time;
-        $tensors{$op_name}{"fp16_energy"} = $op_energy;
-    }
-}
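-# Illustrative example (hypothetical op names and values): a line "Conv1_f2h,1.2,3.4"
-# strips the "_f2h" suffix and stores $tensors{"Conv1"}{"f2h_time"} = 1.2 and
-# $tensors{"Conv1"}{"f2h_energy"} = 3.4; a plain "Conv1,5.6,7.8" fills the
-# fp16_time/fp16_energy entries for the same op instead.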
-########################################
-
-########################################
-print "Reading FP32 data\n";
-open(my $fp32_data_file, '<', $fp32_data_filename) or die "Couldn't open FP32 data file $fp32_data_filename: $!";
-while (my $line = <$fp32_data_file>) {
-    chomp $line;
-    my @tokens = split /,/, $line;
-
-    # The format of each line is:
-    # 0        1     2
-    # Op name, time, energy
-    my $op_name = $tokens[0];
-    my $op_time = $tokens[1];
-    my $op_energy = $tokens[2];
-
-    $tensors{$op_name}{"Name"} = $op_name;
-    $tensors{$op_name}{"fp32_time"} = $op_time;
-    $tensors{$op_name}{"fp32_energy"} = $op_energy;
-}
-########################################
-
-########################################
-print "Generating output tensor file\n";
-open(my $tensor_file, '<', $tensor_filename) or die "Couldn't open tensor info file $tensor_filename: $!";
-open(my $output_file, '>', $output_filename) or die "Couldn't open results file $output_filename: $!";
-while (my $line = <$tensor_file>) {
-    print $output_file $line;
-    chomp $line;
-
-    # Layer lines look like this: #layer_name, num_tensor_ops_in_layer
-    my @tokens = split /,/, $line;
-    my $layer_name = substr($tokens[0], 1);
-    my $num_ops = $tokens[1];
-
-    # Tensor lines look like this: name
-    for (my $i = 0; $i < $num_ops; $i++) {
-        my $op_name = <$tensor_file>;
-        chomp $op_name;
-
-        # Format of each output line:
-        # Name, FP32 time, FP32 energy, FP16 time, FP16 energy, f2h time, f2h energy, h2f time, h2f energy
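-        # e.g. the Conv1 line in resnet18_tensors.txt above:
-        # Conv1,96.2953,255.037,174.622,421.265,6.3224,13.0911,20.9136,52.8942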
-        print $output_file $tensors{$op_name}{"Name"} . ",";
-        print $output_file $tensors{$op_name}{"fp32_time"} . ",";
-        print $output_file $tensors{$op_name}{"fp32_energy"} . ",";
-        print $output_file $tensors{$op_name}{"fp16_time"} . ",";
-        print $output_file $tensors{$op_name}{"fp16_energy"} . ",";
-        print $output_file $tensors{$op_name}{"f2h_time"} . ",";
-        print $output_file $tensors{$op_name}{"f2h_energy"} . ",";
-        print $output_file $tensors{$op_name}{"h2f_time"} . ",";
-        print $output_file $tensors{$op_name}{"h2f_energy"} . "\n";
-    }
-}
-
-print "Done!\n";
-########################################
-
-sub is_f2h {
-    my ($name) = @_;
-    if ($name =~ /f2h/i) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
-sub is_h2f {
-    my ($name) = @_;
-    if ($name =~ /h2f/i) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
diff --git a/llvm/projects/soc_simulator/src/driver.pl b/llvm/projects/soc_simulator/src/driver.pl
deleted file mode 100755
index fe53ca9d850e58e72a1e8ebe8d7e048d24ddc017..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/driver.pl
+++ /dev/null
@@ -1,506 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my $fp16_swing = 8;
-my $iterations = 10;
-
-my $PROMISE = "PROMISE";
-my $fp16 = "FP16";
-my $fp32 = "FP32";
-
-my @layers;
-my %tensors;
-my %results;
-
-########################################
-if (($#ARGV + 1) != 6) {
-    print "Usage: driver.pl <layer info> <tensor info> <configurations> <results file> <smart DMA> <detailed results?>\n";
-    exit;
-}
-
-my $layer_filename = $ARGV[0];
-my $tensor_filename = $ARGV[1];
-my $config_filename = $ARGV[2];
-my $results_filename = $ARGV[3];
-my $smart_dma = $ARGV[4];
-my $detailed_results = $ARGV[5];
-########################################
-
-########################################
-print "Reading layer info\n";
-open(my $layer_file, '<', $layer_filename) or die "Couldn't open layer info file $layer_filename: $!";
-while (my $line = <$layer_file>) {
-    chomp $line;
-    add_layer($line);
-}
-########################################
-
-########################################
-print "Reading tensor info\n";
-open(my $tensor_file, '<', $tensor_filename) or die "Couldn't open tensor info file $tensor_filename: $!";
-while (my $line = <$tensor_file>) {
-    chomp $line;
-    # Layer lines look like this: #layer_name, num_tensor_ops_in_layer
-    my @tokens = split /,/, $line;
-    my $layer_name = substr($tokens[0], 1);
-    my $num_ops = $tokens[1];
-
-    # Tensor lines look like this: name, FP32 time, FP32 energy, FP16 time, FP16 energy, f2h time, f2h energy, h2f time, h2f energy
-    for (my $i = 0; $i < $num_ops; $i++) {
-        my $op = <$tensor_file>;
-        chomp $op;
-        my @values = split /,/, $op;
-        $tensors{$layer_name}{$i} = [@values];
-    }
-}
-########################################
-
-########################################
-print "Running simulations\n";
-my $conf_count = 0;
-open(my $config_file, '<', $config_filename) or die "Couldn't open config file $config_filename: $!";
-open(my $results_file, '>', $results_filename) or die "Couldn't open results file $results_filename: $!";
-while (my $line = <$config_file>) {
-    chomp $line;
-
-    # Each line has a bunch of comma separated voltage swing levels
-    my @levels = split /,/, $line;
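-    # Illustrative (hypothetical) config line: "7,8 8,9 8"
-    #   level "7"   -> first sublevel 7 < $fp16_swing (8), so this layer runs on PROMISE at swing 7
-    #   level "8 8" -> two GPU tensor ops, both FP16 (sublevel == 8)
-    #   level "9 8" -> two GPU tensor ops, FP32 (9) then FP16 (8)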
-    my $layer_count = 0;
-    my $prev = $fp32;
-    my $curr;
-    foreach my $level (@levels) {
-        my %layer = %{$layers[$layer_count]};
-
-        if (is_promise($level)) {
-            # The voltage level corresponds to PROMISE
-            print "Running layer $layer{\"Name\"} on PROMISE\n";
-            $curr = $PROMISE;
-
-            # Quantization
-            my ($qtime, $qenergy) = quantize($curr, $prev, 0, %layer);
-
-            # Patching
-            my ($ptime, $penergy) = patch(%layer);
-
-            # Compute
-            my ($ttime, $tenergy, $ctime, $cenergy, $mtime, $menergy, $lenergy) = promise($level, %layer);
-
-            # Unpatching
-            my ($utime, $uenergy) = unpatch(%layer);
-
-            # Layer info
-            $results{"Time"}{$conf_count}{$layer{"Name"}} = $mtime + $ctime;
-            $results{"Quantization Time"}{$conf_count}{$layer{"Name"}} = 0.0;
-            $results{"Memory Time"}{$conf_count}{$layer{"Name"}} = $mtime;
-            $results{"Compute Time"}{$conf_count}{$layer{"Name"}} = $ctime;
-
-            $results{"Energy"}{$conf_count}{$layer{"Name"}} = $qenergy + $penergy + $menergy + $cenergy + $uenergy + $lenergy;
-            $results{"Quantization Energy"}{$conf_count}{$layer{"Name"}} = $qenergy;
-            $results{"Patch Energy"}{$conf_count}{$layer{"Name"}} =   $penergy;
-            $results{"Memory Energy"}{$conf_count}{$layer{"Name"}} =  $menergy;
-            $results{"Compute Energy"}{$conf_count}{$layer{"Name"}} = $cenergy;
-            $results{"Unpatch Energy"}{$conf_count}{$layer{"Name"}} = $uenergy;
-            $results{"Leakage Energy"}{$conf_count}{$layer{"Name"}} = $lenergy;
-
-            # Aggregate info
-            $results{"Time"}{$conf_count}{"Total"} += ($mtime + $ctime);
-            $results{"Quantization Time"}{$conf_count}{"Total"} += 0.0;
-            $results{"Memory Time"}{$conf_count}{"Total"} += $mtime;
-            $results{"Compute Time"}{$conf_count}{"Total"} += $ctime;
-
-            $results{"Energy"}{$conf_count}{"Total"} += ($qenergy + $penergy + $menergy + $cenergy + $uenergy + $lenergy);
-            $results{"Quantization Energy"}{$conf_count}{"Total"} += $qenergy;
-            $results{"Patch Energy"}{$conf_count}{"Total"} +=   $penergy;
-            $results{"Memory Energy"}{$conf_count}{"Total"} +=  $menergy;
-            $results{"Compute Energy"}{$conf_count}{"Total"} += $cenergy;
-            $results{"Unpatch Energy"}{$conf_count}{"Total"} += $uenergy;
-            $results{"Leakage Energy"}{$conf_count}{"Total"} += $lenergy;
-        } else {
-            # The voltage level corresponds to GPU (FP16 or FP32)
-            print "Running layer $layer{\"Name\"} on the GPU\n";
-            my @sublevels = split / /, $level;
-            my $tensor_count = 0;
-
-            my $total_qtime = 0.0;
-            my $total_ctime = 0.0;
-            my $total_qenergy = 0.0;
-            my $total_cenergy = 0.0;
-
-            foreach my $sublevel (@sublevels) {
-                if ($sublevel == $fp16_swing) {
-                    $curr = $fp16;
-                } else {
-                    $curr = $fp32;
-                }
-
-                # Quantization
-                my ($qtime, $qenergy) = quantize($curr, $prev, $tensor_count, %layer);
-
-                # Compute
-                my ($ctime, $cenergy) = gpu($curr, $layer{"Name"}, $tensor_count);
-
-                # Update total
-                $total_qtime += $qtime;
-                $total_ctime += $ctime;
-                $total_qenergy += $qenergy;
-                $total_cenergy += $cenergy;
-
-                $prev = $curr;
-                $tensor_count++;
-            }
-
-            # Layer info
-            $results{"Time"}{$conf_count}{$layer{"Name"}} = $total_qtime + $total_ctime;
-            $results{"Quantization Time"}{$conf_count}{$layer{"Name"}} = $total_qtime;
-            $results{"Memory Time"}{$conf_count}{$layer{"Name"}} = 0.0;
-            $results{"Compute Time"}{$conf_count}{$layer{"Name"}} = $total_ctime;
-
-            $results{"Energy"}{$conf_count}{$layer{"Name"}} = $total_qenergy + $total_cenergy;
-            $results{"Quantization Energy"}{$conf_count}{$layer{"Name"}} = $total_qenergy;
-            $results{"Patch Energy"}{$conf_count}{$layer{"Name"}} =   0.0;
-            $results{"Memory Energy"}{$conf_count}{$layer{"Name"}} =  0.0;
-            $results{"Compute Energy"}{$conf_count}{$layer{"Name"}} = $total_cenergy;
-            $results{"Unpatch Energy"}{$conf_count}{$layer{"Name"}} = 0.0;
-            $results{"Leakage Energy"}{$conf_count}{$layer{"Name"}} = 0.0;
-
-            # Aggregate info
-            $results{"Time"}{$conf_count}{"Total"} += ($total_qtime + $total_ctime);
-            $results{"Quantization Time"}{$conf_count}{"Total"} += $total_qtime;
-            $results{"Memory Time"}{$conf_count}{"Total"} += 0.0;
-            $results{"Compute Time"}{$conf_count}{"Total"} += $total_ctime;
-
-            $results{"Energy"}{$conf_count}{"Total"} += ($total_qenergy + $total_cenergy);
-            $results{"Quantization Energy"}{$conf_count}{"Total"} += $total_qenergy;
-            $results{"Patch Energy"}{$conf_count}{"Total"} +=   0.0;
-            $results{"Memory Energy"}{$conf_count}{"Total"} +=  0.0;
-            $results{"Compute Energy"}{$conf_count}{"Total"} += $total_cenergy;
-            $results{"Unpatch Energy"}{$conf_count}{"Total"} += 0.0;
-            $results{"Leakage Energy"}{$conf_count}{"Total"} += 0.0;
-        }
-
-        $prev = $curr;
-        $layer_count++;
-    }
-
-    print "\n";
-    $conf_count++;
-}
-
-########################################
-print "Printing results\n";
-if ($detailed_results) {
-    my @attributes_to_print = ("Time", "Energy");
-   #foreach my $attribute (@attributes_to_print) {
-    foreach my $attribute (sort keys %results) {
-        print $results_file $attribute . "\n";
-
-        # Print header
-        print $results_file "Configuration,";
-        foreach my $layer (@layers) {
-            print $results_file ${$layer}{"Name"} . ",";
-        }
-        print $results_file "Total,Improvement\n";
-
-        my $baseline = $results{$attribute}{0}{"Total"};
-        my $best_config = undef;
-        my $best_result = undef;
-        for (my $config = 0; $config < $conf_count; $config++) {
-            print $results_file "c$config";
-
-            # This is *really* important. It ensures that each configuration's layer
-            # data is printed in the right order.
-            foreach my $layer (@layers) {
-                my $layer_name = ${$layer}{"Name"};
-                print $results_file "," . $results{$attribute}{$config}{$layer_name};
-            }
-
-            my $val = $results{$attribute}{$config}{"Total"};
-            print $results_file "," . $val;
-            print $results_file "," . ($baseline / ($val + 0.0001));
-            print $results_file "\n";
-
-            if ((!defined $best_result) or ($val < $best_result)) {
-                $best_result = $val;
-                $best_config = $config;
-            }
-        }
-
-        print $results_file "\n";
-        print $results_file "c$best_config";
-        print $results_file "," . $results{$attribute}{$best_config}{"Total"};
-        print $results_file "\n\n";
-    }
-} else {
-    my @attributes_to_print = ("Time", "Energy");
-    foreach my $attribute (@attributes_to_print) {
-        print $results_file $attribute . "\n";
-
-        # Print header
-        print $results_file "Configuration,";
-        print $results_file "Total,Improvement\n";
-
-        my $baseline = $results{$attribute}{0}{"Total"};
-        my $best_config = undef;
-        my $best_result = undef;
-        for (my $config = 0; $config < $conf_count; $config++) {
-            print $results_file "c$config";
-
-            my $val = $results{$attribute}{$config}{"Total"};
-            print $results_file "," . $val;
-            print $results_file "," . ($baseline / ($val + 0.0001));
-            print $results_file "\n";
-
-            if ((!defined $best_result) or ($val < $best_result)) {
-                $best_result = $val;
-                $best_config = $config;
-            }
-        }
-
-        print $results_file "\n";
-        print $results_file "c$best_config";
-        print $results_file "," . $results{$attribute}{$best_config}{"Total"};
-        print $results_file "\n\n";
-    }
-}
-
-# Cleanup
-`rm -f blah profile_data.txt`;
-print "Done!\n";
-########################################
-
-sub add_layer {
-    my ($line) = @_;
-    my @tokens = split /,/, $line;
-
-    # The format of each line is:
-    # 0     1  2  3  4  5   6  7   8   9   10
-    # Name, N, C, H, W, Co, C, Kh, Kw, Sh, Sw
-    #       OR
-    # 0     1   2   3   4
-    # Name, RA, CA, RB, CB
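-    # Hypothetical examples: "Conv1,1,3,32,32,64,3,3,3,1,1" (conv) or "FC1,1,1024,1024,10" (FC)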
-    my %layer;
-    my $layer_name = $tokens[0];
-    $layer{"Name"} = $layer_name;
-    if (is_conv($layer_name)) {
-        $layer{"N"} = $tokens[1];
-        $layer{"C"} = $tokens[2];
-        $layer{"H"} = $tokens[3];
-        $layer{"W"} = $tokens[4];
-        $layer{"Co"} = $tokens[5];
-        $layer{"Kh"} = $tokens[7];
-        $layer{"Kw"} = $tokens[8];
-        $layer{"Sh"} = $tokens[9];
-        $layer{"Sw"} = $tokens[10];
-    } elsif (is_fc($layer_name)) {
-        $layer{"RA"} = $tokens[1];
-        $layer{"CA"} = $tokens[2];
-        $layer{"RB"} = $tokens[3];
-        $layer{"CB"} = $tokens[4];
-    } elsif (not is_nml($layer_name)) {
-        die "Illegal layer name\n";
-    }
-
-    push @layers, \%layer;
-}
-
-sub quantize {
-    my ($curr, $prev, $tensor, %layer) = @_;
-    my $size;
-    my $te;
-
-    # No quantization needed if on same device/knob
-    if ($curr eq $prev) {
-        return (0.0, 0.0);
-    }
-
-    # No quantization needed with smart DMA
-    if ($smart_dma and (($curr eq $PROMISE) or ($prev eq $PROMISE))) {
-        return (0.0, 0.0);
-    }
-
-    my $layer_name = $layer{"Name"};
-    if (is_conv($layer_name)) {
-        # Input + Kernel
-        $size = ($layer{"N"} * $layer{"C"} * $layer{"H"} * $layer{"W"}) + ($layer{"Co"} * $layer{"C"} * $layer{"Kh"} * $layer{"Kw"});
-    } elsif (is_fc($layer_name)) {
-        # Matrix A + matrix B
-        $size = ($layer{"RA"} * $layer{"CA"}) + ($layer{"RB"} * $layer{"CB"});
-    } elsif (not is_nml($layer_name)) {
-        die "This should never, ever happen\n";
-    }
-
-    if ($curr eq $PROMISE) {
-        # We are offloading to PROMISE
-        my $type;
-        if ($prev eq $fp32) {
-            $type = "f2c";
-        } else {
-            $type = "h2c";
-        }
-        $te = `~/awesome_profiler/pp "./quantize $size $type" $iterations blah`;
-        chomp $te;
-    } elsif ($prev eq $PROMISE) {
-        # We are coming back from PROMISE
-        my $type;
-        if ($curr eq $fp32) {
-            $type = "c2f";
-        } else {
-            $type = "c2h";
-        }
-        $te = `~/awesome_profiler/pp "./quantize $size $type" $iterations blah`;
-        chomp $te;
-    } else {
-        # We are converting between FP16 and FP32
-        # Tensor lines look like this: name, FP32 time, FP32 energy, FP16 time, FP16 energy, f2h time, f2h energy, h2f time, h2f energy
-        my @info = @{$tensors{$layer{"Name"}}{$tensor}};
-        if ($curr eq $fp32) {
-            $te = $info[7] . "," . $info[8]; # h2f
-        } else {
-            $te = $info[5] . "," . $info[6]; # f2h
-        }
-    }
-
-    my @temp = split /,/, $te;
-    print "Quantization: ($temp[0], $temp[1])\n";
-    return ($temp[0], $temp[1]);
-}
-
-sub patch {
-    # No patching needed with smart DMA
-    if ($smart_dma) {
-        return (0.0, 0.0);
-    }
-
-    my (%layer) = @_;
-    if (is_conv($layer{"Name"})) {
-        my $te_input = `~/awesome_profiler/pp "./patch $layer{"N"} $layer{"C"} $layer{"H"} $layer{"W"} $layer{"Co"} $layer{"Kh"} $layer{"Kw"} patch" $iterations blah`;
-        my $te_kernel = `~/awesome_profiler/pp "./patch $layer{"N"} $layer{"C"} $layer{"H"} $layer{"W"} $layer{"Co"} $layer{"Kh"} $layer{"Kw"} kernel" $iterations blah`;
-        chomp $te_input;
-        chomp $te_kernel;
-        my @input = split /,/, $te_input;
-        my @kernel = split /,/, $te_kernel;
-        print "Patch: ($input[0] + $kernel[0], $input[1] + $kernel[1])\n";
-        return ($input[0] + $kernel[0], $input[1] + $kernel[1]);
-    } else {
-        return (0.0, 0.0);
-    }
-}
-
-sub unpatch {
-    # No unpatching needed with smart DMA
-    if ($smart_dma) {
-        return (0.0, 0.0);
-    }
-
-    my (%layer) = @_;
-    if (is_conv($layer{"Name"})) {
-        my $te = `~/awesome_profiler/pp "./patch $layer{"N"} $layer{"C"} $layer{"H"} $layer{"W"} $layer{"Co"} $layer{"Kh"} $layer{"Kw"} unpatch" $iterations blah`;
-        chomp $te;
-        my @temp = split /,/, $te;
-        print "Unpatch: ($temp[0], $temp[1])\n";
-        return ($temp[0], $temp[1]);
-    } else {
-        return (0.0, 0.0);
-    }
-}
-
-sub promise {
-    my ($swing, %layer) = @_;
-    my $rows_a;
-    my $cols_a;
-    my $rows_b;
-    my $cols_b;
-    my $patch_factor = 1;
-    my $layer_name = $layer{"Name"};
-
-    if (is_conv($layer_name)) {
-        $rows_a = ($layer{"N"} * $layer{"H"} * $layer{"W"}) / ($layer{"Sh"} * $layer{"Sw"});
-        $cols_a = $layer{"C"} * $layer{"Kh"} * $layer{"Kw"};
-        $rows_b = $cols_a;
-        $cols_b = $layer{"Co"};
-
-        if ($smart_dma) {
-            $patch_factor = $layer{"Kh"} * $layer{"Kw"};
-        }
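-        # (Interpretation) These are the GEMM dimensions of the im2col-lowered convolution:
-        # rows_a = number of output positions, cols_a = rows_b = patch size C*Kh*Kw,
-        # cols_b = output channels Co; with smart DMA, patch_factor = Kh*Kw accounts
-        # for the patching that would otherwise be done explicitly.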
-    } elsif (is_fc($layer_name)) {
-        $rows_a = $layer{"RA"};
-        $cols_a = $layer{"CA"};
-        $rows_b = $cols_a;
-        $cols_b = $layer{"CB"};
-    } else {
-        # It's either an NML or something else, either way, this is bad
-        die "PROMISE can't run whatever this is!\n";
-    }
-
-    #print "[$rows_a x $cols_a] x [$rows_b x $cols_b] : $swing\n";
-    my $te = `./ptm $rows_a $cols_a $rows_b $cols_b $patch_factor $swing`;
-    chomp $te;
-    my @temp = split /,/, $te;
-    print "PROMISE: ($temp[0], $temp[1])\n";
-    return ($temp[0], $temp[1], $temp[2], $temp[3], $temp[4], $temp[5], $temp[6]);
-}
-
-sub gpu {
-    my ($curr, $layer_name, $tensor) = @_;
-    my @info = @{$tensors{$layer_name}{$tensor}};
-    my $time;
-    my $energy;
-
-    # Tensor lines look like this: name, FP32 time, FP32 energy, FP16 time, FP16 energy, f2h time, f2h energy, h2f time, h2f energy
-    if ($curr eq $fp32) {
-        $time = $info[1];
-        $energy = $info[2];
-    } else {
-        $time = $info[3];
-        $energy = $info[4];
-    }
-
-    print "GPU: ($time, $energy)\n";
-    return ($time, $energy);
-}
-
-sub is_promise {
-    my ($level) = @_;
-    my @sublevels = split / /, $level;
-    if ($sublevels[0] < $fp16_swing) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
-sub is_conv {
-    my ($name) = @_;
-    if ($name =~ /conv/i) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
-sub is_fc {
-    my ($name) = @_;
-    if ($name =~ /fc/i) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
-sub is_nml {
-    my ($name) = @_;
-    if ($name =~ /nml/i) {
-        return 1;
-    } else {
-        return;
-    }
-}
-
diff --git a/llvm/projects/soc_simulator/src/driver.py b/llvm/projects/soc_simulator/src/driver.py
deleted file mode 100644
index dbf2651bd3a9512c46d9e0a549c61290ad913ab0..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/driver.py
+++ /dev/null
@@ -1,306 +0,0 @@
-from collections import defaultdict
-import os
-import subprocess
-import sys
-
-class Driver:
-    fp16_swing = 8
-
-    class ApproxTypes:
-        FP16 = 0
-        FP32 = 1
-        PROMISE = 2
-
-    results_time_key = "Time"
-    results_energy_key = "Energy"
-
-    def __get_str(self, appr):
-        if appr == Driver.ApproxTypes.FP16:
-            return "FP16"
-        elif appr == Driver.ApproxTypes.FP32:
-            return "FP32"
-        elif appr == Driver.ApproxTypes.PROMISE:
-            return "PROMISE"
-
-    def driver(self):
-        self.__parse_tensor_layer_file()
-        self.__parse_tensor_table()
-        self.__run_simulations()
-        self.__display_results()
-
-
-    def __init__(self, layer_filename, table_filename, config_filename, results_filename):
-        self.__layer_filename = layer_filename
-        self.__table_filename = table_filename
-        self.__config_filename = config_filename
-        self.__results_filename = results_filename
-
-        # NOTE: Use an OrderedDict if we want to search by operation name.
-        # Using a list because we care about the order the data is read in,
-        # since it corresponds to the data in the configuration file.
-        self.__tensor_layers = []
-
-        # [layer_name][operation_name][cols] 
-        # Operation names need to be stored in order of insertion 
-        self.__tensor_table = defaultdict(lambda: list(defaultdict(str)))
-
-        # [Time/Energy][number corresponding to order the layer config was read in] = time/energy
-        self.__aggregate_results = defaultdict(lambda: defaultdict(float))
-        self.__config_count = 0
-
-
-    @staticmethod
-    def is_conv(operation_name):
-        return operation_name.startswith("Conv")
-
-
-    @staticmethod
-    def is_nml(operation_name):
-        return operation_name.startswith("NML")
-
-
-    @staticmethod
-    def is_fc(operation_name):
-        return operation_name.startswith("FC")
-
-
-    def __parse_tensor_layer_file(self): 
-        if not os.path.isfile(self.__layer_filename):
-            print("ERROR: %s was not found." % self.__layer_filename)
-            exit(1)
-
-        layer_file = open(self.__layer_filename, "r")
-        for line in layer_file:
-            layer_data = line.strip().split(',')
-            layer_name = layer_data[0]
-
-            tensor_layer = defaultdict(str)
-            tensor_layer["Name"] = layer_name
-
-            if Driver.is_conv(layer_name):
-                tensor_layer["N"] = float(layer_data[1])
-                tensor_layer["Cin"] = float(layer_data[2])
-                tensor_layer["H"] = float(layer_data[3])
-                tensor_layer["W"] = float(layer_data[4])
-                tensor_layer["Cout"] = float(layer_data[5])
-                tensor_layer["Kh"] = float(layer_data[7])
-                tensor_layer["Kw"] = float(layer_data[8])
-                tensor_layer["Sh"] = float(layer_data[9])
-                tensor_layer["Sw"] = float(layer_data[10])
-
-            elif Driver.is_fc(layer_name):
-                tensor_layer["RA"] = float(layer_data[1])
-                tensor_layer["CA"] = float(layer_data[2])
-                tensor_layer["RB"] = float(layer_data[3])
-                tensor_layer["CB"] = float(layer_data[4])
-
-            elif not Driver.is_nml(layer_name): # TODO should we store data for NMLs?
-                print("ERROR: Invalid layer name %s" % layer_name)
-                exit(1)
-
-            self.__tensor_layers.append(tensor_layer)
-        layer_file.close()
-
-
-    def __parse_tensor_table(self): 
-        if not os.path.isfile(self.__table_filename):
-            print("ERROR: %s was not found." % self.__table_filename)
-            exit(1)
-        table_file = open(self.__table_filename, "r")
-        line = table_file.readline().strip()
-
-        while line:
-            # Line here MUST be a header or there's a bug 
-            # Get the description of the layer 
-            assert(line.startswith("**"))
-
-            header_contents = line.split(' ')[1:] 
-            layer_name = header_contents[0]
-            num_ops = int(header_contents[1])
-            col_names = header_contents[2:]
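-            # Illustrative (hypothetical) header line:
-            #   ** Conv1 3 fp32_time fp32_energy fp16_time fp16_energy f2h_time f2h_energy h2f_time h2f_energy
-            # i.e. layer Conv1 has 3 tensor-op rows, each carrying the listed columns.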
-
-            layer_operations = []
-
-            # Go through all operations in the layer
-            for op_count in range(num_ops):
-                operation_data = defaultdict(str)
-
-                line = table_file.readline().strip()
-                op_data = line.split(' ')
-                op_name = op_data[0]
-                operation_data["Name"] = op_name
-
-                # Number of data items (#s) needs to match up with the # of cols 
-                assert(len(op_data) - 1 == len(col_names)) 
-
-                # Go through all data items (each col element) per operation 
-                for i in range(len(col_names)):
-                    operation_data[col_names[i]] = float(op_data[i + 1])
-
-                layer_operations.append(operation_data)
-
-            self.__tensor_table[layer_name] = layer_operations
-            line = table_file.readline().strip()
-        table_file.close()
-
-
-    @staticmethod
-    def is_promise(config_layer):
-        return float(config_layer.split(' ')[0]) < Driver.fp16_swing
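-    # e.g. is_promise("7 7") is True (7 < fp16_swing == 8) while is_promise("8 8") is False.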
-
-
-    def __quantize(self, curr_layer, prev_layer, h2f_f2h_operation_ind, layer_data):
-        print(self.__get_str(curr_layer), self.__get_str(prev_layer), h2f_f2h_operation_ind)
-        if curr_layer == prev_layer or curr_layer == Driver.ApproxTypes.PROMISE \
-                    or prev_layer == Driver.ApproxTypes.PROMISE: # No quantization needed
-            return 0.0, 0.0
-       
-        layer_name = layer_data["Name"]
-
-        # NOTE: Ignoring logic where curr == promise or prev == promise because
-        # smart DMA is always true, so we'd return near the beginning of the method
-
-        # Get h2f/f2h data using the first tensor operation in the layer
-        # (which is why order matters in the tensor table)
-        tensor_op_row = self.__tensor_table[layer_name][h2f_f2h_operation_ind]  
-        if curr_layer == Driver.ApproxTypes.FP32:
-            time = tensor_op_row["h2f_time"]
-            energy = tensor_op_row["h2f_energy"]
-        elif curr_layer == Driver.ApproxTypes.FP16:
-            time = tensor_op_row["f2h_time"]
-            energy = tensor_op_row["f2h_energy"]
-
-        print("Quantization: (%f, %f)" % (time, energy))
-        return (time, energy)
-
-
-    def __run_promise_simulation(self, swing, layer_data):
-        layer_name = layer_data["Name"] 
-        patch_factor = 1 
-
-        if Driver.is_conv(layer_name): 
-            rows_a = layer_data["N"] * layer_data["H"] * layer_data["W"] \
-                    / (layer_data["Sh"] * layer_data["Sw"])
-            cols_a = layer_data["Cin"] * layer_data["Kh"] * layer_data["Kw"]
-            rows_b = cols_a
-            cols_b = layer_data["Cout"]
-            patch_factor = layer_data["Kh"] * layer_data["Kw"]
-        elif Driver.is_fc(layer_name):
-            rows_a = layer_data["RA"] 
-            cols_a = layer_data["CA"]
-            rows_b = cols_a
-            cols_b = layer_data["CB"]
-        else:
-            print("PROMISE can't run whatever this layer is.")
-            exit(1)
-        # Run promise simulator
-        # TODO need to print time and energy in the ptm runner so we can pipe it
-        output = subprocess.Popen(["./ptm_new", str(rows_a), str(cols_a), str(rows_b), \
-                    str(cols_b), str(patch_factor), str(swing)], \
-                    stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
-        total_time_energy = output.strip().split(',')
-
-        assert(len(total_time_energy) == 2)
-        print("PROMISE: (%s, %s)" % (total_time_energy[0], total_time_energy[1]))
-        return float(total_time_energy[0]), float(total_time_energy[1])
-
-
-    def __run_gpu_simulation(self, curr_layer, layer_name, tensor_ind):
-        tensor_info = self.__tensor_table[layer_name][tensor_ind]
-        if curr_layer == Driver.ApproxTypes.FP32:
-            conversion_time = tensor_info["fp32_time"]
-            conversion_energy = tensor_info["fp32_energy"]
-        else:
-            conversion_time = tensor_info["fp16_time"]
-            conversion_energy = tensor_info["fp16_energy"]
-        print("GPU: (%f, %f)" % (conversion_time, conversion_energy))
-        return (conversion_time, conversion_energy)
-
-
-    def __run_simulations(self):
-        if not os.path.isfile(self.__config_filename):
-            print("ERROR: %s was not found" % self.__config_filename)
-            exit(1)
-
-        config_file = open(self.__config_filename, "r")
-      
-        # each line = independent configuration
-        # layers are separated by commas
-        # tensor ops are separated by spaces
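-        # Hypothetical example: the line "7,8 8" runs layer 1 on PROMISE at swing 7
-        # and layer 2 on the GPU with two FP16 tensor ops (8 == fp16_swing).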
-        for config in config_file:
-            config_layers = config.strip().split(',')
-            prev_layer = Driver.ApproxTypes.FP32
-            curr_layer = None
-
-            for layer_ind, config_layer in enumerate(config_layers): # level
-                layer_data = self.__tensor_layers[layer_ind]  # layer
-                layer_name = layer_data["Name"]
-                if Driver.is_promise(config_layer):
-                    print("Running layer %s on PROMISE" % layer_name)
-                    curr_layer = Driver.ApproxTypes.PROMISE
-                    quant_time, quant_energy = self.__quantize(curr_layer, prev_layer, 0, layer_data)
-                    # Compute 
-                    time, energy = self.__run_promise_simulation(config_layer, layer_data)
-                    self.__aggregate_results[Driver.results_time_key][self.__config_count] += time
-                    self.__aggregate_results[Driver.results_energy_key][self.__config_count] += energy 
-                else:
-                    print("Running layer %s on the GPU" % layer_name)
-                    tensor_ops = config_layer.split(' ')
-
-                    total_time = 0
-                    total_energy = 0
-                    for tensor_ind, tensor_op in enumerate(tensor_ops): # sublevel
-                        tensor_op = int(tensor_op)
-                        if tensor_op == Driver.fp16_swing:
-                            curr_layer = Driver.ApproxTypes.FP16
-                        else:
-                            curr_layer = Driver.ApproxTypes.FP32
-
-                        quant_time, quant_energy = self.__quantize(curr_layer, prev_layer, tensor_ind, layer_data)
-                        conv_time, conv_energy = self.__run_gpu_simulation(curr_layer, layer_name, tensor_ind)
-                        total_time += quant_time + conv_time 
-                        total_energy += quant_energy + conv_energy
-                        prev_layer = curr_layer
-
-                    self.__aggregate_results[Driver.results_time_key][self.__config_count] += total_time
-                    self.__aggregate_results[Driver.results_energy_key][self.__config_count] += total_energy 
-                prev_layer = curr_layer
-
-            self.__config_count += 1
-            print("\n")
-        config_file.close()
-
-
-    def __display_results(self):
-        results_file = open(self.__results_filename, "w")
-        attributes_to_print = [Driver.results_time_key, Driver.results_energy_key]
-
-        for attribute in attributes_to_print:
-            results_file.write("%s\n" % attribute)
-            results_file.write("Configuration,Total,Improvement\n") 
-
-            baseline_val = self.__aggregate_results[attribute][0]
-            print(baseline_val)
-            best_config = None
-            best_result = None
-
-            for config_ind in range(self.__config_count):
-                results_file.write("c%d" % config_ind)
-                time_or_energy_val = self.__aggregate_results[attribute][config_ind]
-
-                # Using repr to keep all decimal digits when writing to file
-                results_file.write(",%s" % repr(time_or_energy_val))
-                results_file.write(",%s\n" % repr(baseline_val / (time_or_energy_val + 0.0001)))
-
-                if best_result is None or time_or_energy_val < best_result:
-                    best_result = time_or_energy_val
-                    best_config = config_ind
-            results_file.write("\nc%d,%s\n\n" % (best_config, repr(self.__aggregate_results[attribute][best_config])))
-        results_file.close()
-
-
-if __name__ == "__main__":
-    if len(sys.argv) != 5:
-        print("Usage: python driver.py <layer info> <tensor info> <configurations> <results file>")
-        exit(1)
-    Driver(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]).driver()
diff --git a/llvm/projects/soc_simulator/src/driver_new_config_fp16_repl.py b/llvm/projects/soc_simulator/src/driver_new_config_fp16_repl.py
deleted file mode 100644
index d6c3d63112c83cd9b545914a9a33f4c5b5dae6ce..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/driver_new_config_fp16_repl.py
+++ /dev/null
@@ -1,515 +0,0 @@
-from collections import defaultdict
-import os
-import subprocess
-import sys
-
-class Driver:
-    class PrecisionTypes:
-        FP16 = 0
-        FP32 = 1
-        PROMISE = 2
-
-    class ApproxTypes:
-        PERF = 3 
-        SAMP = 4
-        REDUCE = 5
-
-    results_time_key = "Time"
-    results_energy_key = "Energy"
-
-
-    def __init__(self, layer_filename, table_filename, config_filename, results_filename):
-        self.__layer_filename = layer_filename
-        self.__table_filename = table_filename
-        self.__config_filename = config_filename
-        self.__results_filename = results_filename
-
-        # NOTE: Use an OrderedDict if we want to search by operation name.
-        # Using a list because we care about the order the data is read in,
-        # since it corresponds to the data in the configuration file.
-        self.__tensor_layers = []
-
-        # [layer_name][operation_name][cols] 
-        # Operation names need to be stored in order of insertion 
-        self.__tensor_table = defaultdict(lambda: list(defaultdict(str)))
-
-        self.__conf_results = [] # indexed 
-        #self.__conf_results = {} # {conf name: (first line, [[layer value if promise], [tensor vals if gpu]])}
-
-
-    @staticmethod
-    def is_conv(operation_name):
-        return operation_name.startswith("Conv")
-
-
-    @staticmethod
-    def is_nml(operation_name):
-        return operation_name.startswith("NML")
-
-
-    @staticmethod
-    def is_fc(operation_name):
-        return operation_name.startswith("FC")
-
-
-    # FOR DEBUGGING ONLY
-    def __get_str(self, appr):
-        if appr == Driver.PrecisionTypes.FP16:
-            return "FP16"
-        elif appr == Driver.PrecisionTypes.FP32:
-            return "FP32"
-        elif appr == Driver.PrecisionTypes.PROMISE:
-            return "PROMISE"
-        elif appr == Driver.ApproxTypes.PERF:
-            return "PERF"
-        elif appr == Driver.ApproxTypes.SAMP:
-            return "SAMP"
-        elif appr == Driver.ApproxTypes.REDUCE:
-            return "REDUCE"
-
-    def driver(self):
-        self.__parse_tensor_layer_file()
-        self.__parse_tensor_table()
-        self.__run_simulations()
-        self.__write_output()
-
-
-    def __parse_tensor_layer_file(self): 
-        if not os.path.isfile(self.__layer_filename):
-            print("ERROR: %s was not found." % self.__layer_filename)
-            exit(1)
-        layer_file = open(self.__layer_filename, "r")
-        for line in layer_file:
-            layer_data = line.strip().split(',')
-            layer_name = layer_data[0]
-
-            tensor_layer = defaultdict(str)
-            tensor_layer["Name"] = layer_name
-
-            if Driver.is_conv(layer_name):
-                tensor_layer["N"] = float(layer_data[1])
-                tensor_layer["Cin"] = float(layer_data[2])
-                tensor_layer["H"] = float(layer_data[3])
-                tensor_layer["W"] = float(layer_data[4])
-                tensor_layer["Cout"] = float(layer_data[5])
-                tensor_layer["Kh"] = float(layer_data[7])
-                tensor_layer["Kw"] = float(layer_data[8])
-                tensor_layer["Sh"] = float(layer_data[9])
-                tensor_layer["Sw"] = float(layer_data[10])
-
-            elif Driver.is_fc(layer_name):
-                tensor_layer["RA"] = float(layer_data[1])
-                tensor_layer["CA"] = float(layer_data[2])
-                tensor_layer["RB"] = float(layer_data[3])
-                tensor_layer["CB"] = float(layer_data[4])
-
-            elif not Driver.is_nml(layer_name): # TODO should we store data for NMLs?
-                print("ERROR: Invalid layer name %s" % layer_name)
-                exit(1)
-
-            self.__tensor_layers.append(tensor_layer)
-        layer_file.close()
-
-
-    def __parse_tensor_table(self): 
-        if not os.path.isfile(self.__table_filename):
-            print("ERROR: %s was not found." % self.__table_filename)
-            exit(1)
-        table_file = open(self.__table_filename, "r")
-        line = table_file.readline().strip()
-
-        while line:
-            # Line here MUST be a header or there's a bug 
-            # Get the description of the layer 
-            assert(line.startswith("**"))
-            header_contents = line.split(' ')[1:] 
-            layer_name = header_contents[0]
-            num_ops = int(header_contents[1])
-            col_names = header_contents[2:]
-
-            layer_operations = []
-
-            # Go through all operations in the layer
-            for op_count in range(num_ops):
-                operation_data = defaultdict(str)
-
-                line = table_file.readline().strip()
-                op_data = line.split(' ')
-                op_name = op_data[0]
-                operation_data["Name"] = op_name
-
-                # Number of data items (#s) needs to match up with the # of cols 
-                assert(len(op_data) - 1 == len(col_names))
-
-                # Go through all data items (each col element) per operation 
-                for i in range(len(col_names)):
-                    operation_data[col_names[i]] = float(op_data[i + 1])
-
-                layer_operations.append(operation_data)
-
-            self.__tensor_table[layer_name] = layer_operations
-            line = table_file.readline().strip()
-        table_file.close()
-
-
-    @staticmethod
-    def is_promise(layer_hardware):
-        return layer_hardware == "promise"
-
-    @staticmethod
-    def is_gpu(layer_hardware):
-        return layer_hardware == "gpu"
-
-    def __run_simulations(self):
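-        # Expected config layout, reconstructed from the parser below (names and values hypothetical):
-        #   +++++
-        #   conf1 <header fields>
-        #   1 promise swing 7
-        #   2 gpu conv fp16 1 add fp16 1
-        #   3 gpu softmax fp32 1
-        #   -----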
-        config_file = open(self.__config_filename, "r")
-        line = config_file.readline().strip()
-
-        while line: 
-            assert(line == "+++++")
-            print("CONFIGURATION")
-           
-            curr_conf_results = []
-
-            prev_layer = Driver.PrecisionTypes.FP32
-            curr_layer = None
-
-            line = config_file.readline().strip()
-            first_line = line
-            conf_name = line.split(' ')[0]
-            print("CONF NAME: %s" % conf_name)
-            assert(conf_name.startswith("conf"))
-            line = config_file.readline().strip()
-
-            while line != "-----":
-                layer_as_lst = line.split(' ')
-                layer_results = []
-                # Skip softmax
-                if line.find("softmax") != -1:
-                    layer_results.append((0, 0, ' '.join(layer_as_lst[2:])))
-                    curr_conf_results.append((layer_as_lst[1], layer_results))
-                    line = config_file.readline().strip()
-                    continue
-                layer_ind = int(layer_as_lst[0]) - 1
-                layer_table_data = self.__tensor_layers[layer_ind]
-                layer_name = layer_table_data["Name"]
-
-                if Driver.is_promise(layer_as_lst[1]):
-                    print("Running layer %s on PROMISE" % layer_name)
-                    curr_layer = Driver.PrecisionTypes.PROMISE
-
-                    total_time = 0
-                    total_energy = 0
-
-                    # To support multiple sets of <param> <number> in the future
-                    for i in range(2, len(layer_as_lst), 2):
-                        param_name = layer_as_lst[i] # Use when there's more than 1 type of param 
-                        param_val = int(layer_as_lst[i + 1])
-                        time, energy = self.__run_promise_simulation(param_val, layer_table_data)
-                        total_time += time
-                        total_energy += energy
-                        print("Curr promise: ", time, energy)
-                    print("Total promise: ", total_time, total_energy)
-                    layer_results.append((total_time, total_energy, ' '.join(layer_as_lst[2:])))
-
-                elif Driver.is_gpu(layer_as_lst[1]):
-                    print("Running layer %s on the GPU" % layer_name)
-
-                    tensor_count = 0 
-
-                    # 3 elements per tensor operation 
-                    for i in range(2, len(layer_as_lst), 3):
-                        op_type = layer_as_lst[i]
-                        precision_type = layer_as_lst[i + 1]
-                        op_number = layer_as_lst[i + 2]
-                        #print(' '.join(layer_as_lst[i : i + 3]))
-
-                        approx_type = None
-                        if line.find("fp16") != -1:
-                            curr_layer = Driver.PrecisionTypes.FP16
-                        elif line.find("fp32") != -1:
-                            curr_layer = Driver.PrecisionTypes.FP32
-                        if precision_type == "perf" or precision_type == "samp" or precision_type == "reduce": # Handle approx type
-                            if precision_type == "perf": 
-                                approx_type = Driver.ApproxTypes.PERF
-                            elif precision_type == "samp": 
-                                approx_type = Driver.ApproxTypes.SAMP
-                            elif precision_type == "reduce":
-                                approx_type = Driver.ApproxTypes.REDUCE
-                            curr_layer = Driver.PrecisionTypes.FP16
-                        quant_time, quant_energy = self.__quantize(precision_type, op_number, curr_layer, prev_layer, tensor_count, layer_table_data)
-                        if quant_time != 0:
-                            assert i == 2 #and layer_ind == 0
-                        conv_time, conv_energy = self.__run_gpu_simulation(curr_layer, layer_name, \
-                                    tensor_count, approx_type, op_number) 
-                        print(quant_time, conv_time)
-                        layer_results.append((quant_time + conv_time, quant_energy + conv_energy, ' '.join(layer_as_lst[i : i + 3])))
-                        prev_layer = curr_layer
-                        tensor_count += 1
-                line = config_file.readline().strip()
-                prev_layer = curr_layer
-                curr_conf_results.append((layer_as_lst[1], layer_results))
-
-            if not self.__conf_results: # we're appending the baseline
-                # need to find the fp16 baseline
-                self.fp16_baseline = []
-
-                prev_layer = Driver.PrecisionTypes.FP32
-                curr_layer = None
-
-                has_quantized = False
-                for layer_ind, (hardware, layer) in enumerate(curr_conf_results):
-                    if layer[0][2].find("softmax") != -1: continue
-                    fp16_layer = []
-                    layer_table_data = self.__tensor_layers[layer_ind]
-                    layer_name = layer_table_data["Name"]
-
-                    for tensor_ind, (op_time, op_energy, tensor_op) in enumerate(layer): 
-                        curr_layer = Driver.PrecisionTypes.FP16 # always
-
-                        quant_time, quant_energy = self.__quantize("fp16", "1", curr_layer, prev_layer, tensor_ind, layer_table_data)
-                        if quant_time != 0:
-                            assert not has_quantized
-                            has_quantized = True
-                        tensor_info = self.__tensor_table[layer_name][tensor_ind]
-                        fp16_time = tensor_info["fp16_time"] + quant_time
-                        fp16_energy = tensor_info["fp16_energy"] + quant_energy
-                        fp16_layer.append((fp16_time, fp16_energy, tensor_op.replace("fp32", "fp16")))  
-                        prev_layer = curr_layer
-
-                    prev_layer = curr_layer
-                    self.fp16_baseline.append((hardware, fp16_layer))
-            self.__conf_results.append( (first_line, curr_conf_results) )
-            line = config_file.readline().strip()
-        config_file.close()
-
-
-    def __quantize(self, precision_type, op_number, curr_layer, prev_layer, h2f_f2h_operation_ind, layer_data):
-        if curr_layer == prev_layer or curr_layer == Driver.PrecisionTypes.PROMISE \
-                    or prev_layer == Driver.PrecisionTypes.PROMISE:
-            return 0.0, 0.0
-        layer_name = layer_data["Name"]
-        print("QUANTIZATION")
-        print(precision_type, op_number, self.__get_str(curr_layer), self.__get_str(prev_layer), h2f_f2h_operation_ind, layer_data)
-        # NOTE: Ignoring logic where curr == promise or prev == promise because
-        # smart DMA is always true, so we'd return near the beginning of the method
-
-        # Get h2f/f2h data using the first tensor operation in the layer
-        # (which is why order matters in the tensor table)
-        tensor_op_row = self.__tensor_table[layer_name][h2f_f2h_operation_ind]  
-        time_key = None
-        energy_key = None
-
-        if op_number == "1":
-            lookup_key = "_" #lookup_key = precision_type
-        else:
-            lookup_key = "_" + precision_type + str(op_number) + "_"
-
-        if curr_layer == Driver.PrecisionTypes.FP32:
-            time_key = "h2f%stime" % lookup_key
-            energy_key = "h2f%senergy" % lookup_key
-        elif curr_layer == Driver.PrecisionTypes.FP16:
-            time_key = "f2h%stime" % lookup_key
-            energy_key = "f2h%senergy" % lookup_key
-        print(tensor_op_row)
-        time = tensor_op_row[time_key]
-        energy = tensor_op_row[energy_key]
-        print(time_key, energy_key)
-        return (time, energy)
-
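-    # Sketch of the key construction above (values illustrative): op_number "1"
-    # collapses the infix to a bare "_", while any other knob embeds the
-    # precision type and knob number, so:
-    #   curr FP16, op_number "1"          -> "f2h_time" / "f2h_energy"
-    #   curr FP16, "perf", op_number "2"  -> "f2h_perf2_time" / "f2h_perf2_energy"
-    #   curr FP32, "samp", op_number "3"  -> "h2f_samp3_time" / "h2f_samp3_energy"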
-
-    def __run_promise_simulation(self, swing, layer_data):
-        layer_name = layer_data["Name"] 
-        patch_factor = 1 
-
-        if Driver.is_conv(layer_name): 
-            rows_a = layer_data["N"] * layer_data["H"] * layer_data["W"] \
-                    / (layer_data["Sh"] * layer_data["Sw"])
-            cols_a = layer_data["Cin"] * layer_data["Kh"] * layer_data["Kw"]
-            rows_b = cols_a
-            cols_b = layer_data["Cout"]
-            patch_factor = layer_data["Kh"] * layer_data["Kw"]
-        elif Driver.is_fc(layer_name):
-            rows_a = layer_data["RA"] 
-            cols_a = layer_data["CA"]
-            rows_b = layer_data["RB"] 
-            cols_b = layer_data["CB"]
-        else:
-            print("ERROR: PROMISE does not support layer %s" % layer_name)
-            exit(1)
-        # Run promise simulator
-        # TODO need to print time and energy in the ptm runner so we can pipe it
-        output = subprocess.Popen(["./ptm_new", str(rows_a), str(cols_a), str(rows_b), \
-                    str(cols_b), str(patch_factor), str(swing)], \
-                    stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
-        total_time_energy = output.strip().split(',')
-
-        assert(len(total_time_energy) == 2)
-        return float(total_time_energy[0]), float(total_time_energy[1])
-
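-    # Worked example (illustrative layer shape, not from the original tables):
-    # a Conv with N=100, Cin=3, H=W=32, Cout=64, Kh=Kw=3, Sh=Sw=1 yields
-    # A = [100*32*32/(1*1) x 3*3*3] = [102400 x 27], B = [27 x 64], and
-    # patch_factor = Kh*Kw = 9 for the ./ptm_new invocation above.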
-
-    def __run_gpu_simulation(self, curr_layer, layer_name, tensor_ind, \
-                    approx_type = None, knob_number = None):
-        tensor_info = self.__tensor_table[layer_name][tensor_ind]
-        time_key = None
-        energy_key = None
-
-        if approx_type == Driver.ApproxTypes.PERF or approx_type == Driver.ApproxTypes.SAMP or approx_type == Driver.ApproxTypes.REDUCE: # fp16_perf2_energy
-            approx_type_str = None
-            if approx_type == Driver.ApproxTypes.PERF:
-                approx_type_str = "perf"
-            elif approx_type == Driver.ApproxTypes.SAMP: 
-                approx_type_str = "samp"
-            elif approx_type == Driver.ApproxTypes.REDUCE:
-                approx_type_str = "reduce"
-
-            if curr_layer == Driver.PrecisionTypes.FP32:
-                time_key = "fp32_%s%s_time" % (approx_type_str, knob_number)
-                energy_key = "fp32_%s%s_energy" % (approx_type_str, knob_number)
-
-            elif curr_layer == Driver.PrecisionTypes.FP16:
-                time_key = "fp16_%s%s_time" % (approx_type_str, knob_number)
-                energy_key = "fp16_%s%s_energy" % (approx_type_str, knob_number)
-
-        else: # None for now
-            if curr_layer == Driver.PrecisionTypes.FP32:
-                time_key = "fp32_time"
-                energy_key = "fp32_energy"
-
-            elif curr_layer == Driver.PrecisionTypes.FP16:
-                time_key = "fp16_time"
-                energy_key = "fp16_energy"
-        #print(time_key, energy_key)
-        conversion_time = tensor_info[time_key]
-        conversion_energy = tensor_info[energy_key]
-        #print("GPU: (%f, %f)\n" % (conversion_time, conversion_energy))
-        return conversion_time, conversion_energy
-
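-    # Minimal lookup sketch (assumed table row, not real measurements):
-    #   row = {"fp16_perf2_time": 0.8, "fp16_perf2_energy": 120.0}
-    #   key = "fp16_%s%s_time" % ("perf", "2")  # -> "fp16_perf2_time"
-    #   row[key]                                # -> 0.8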
-
-    def __write_output(self):
-        config_file = open(self.__config_filename, "r")
-        results_file = open(self.__results_filename, "w")
-
-        def write_conf_to_file(conf_name, final_conf, time_speedup, energy_speedup):
-            # conf = ([layer values if promise], [tensor vals if gpu])
-            conf_str = ["+++++"]
-
-            # process the first line
-            first_line, layers = final_conf
-            first_line_lst = first_line.split(' ')
-            assert first_line_lst[0] == conf_name
-            
-            new_header = [conf_name]
-            new_header.append(repr(time_speedup))
-            new_header.append(repr(energy_speedup))
-            new_header.append(repr(abs(float(first_line_lst[-2]))))
-            new_header.append(repr(abs(float(first_line_lst[-1]))))
-            conf_str.append(' '.join(new_header))
-           
-            for ind, (hardware, layer) in enumerate(layers):
-                layer_lst = [str(ind + 1)]
-                layer_lst.append(hardware)
-                for op_time, op_energy, tensor_op in layer:
-                    layer_lst.append(tensor_op) 
-                conf_str.append(' '.join(layer_lst))
-            conf_str.append("-----\n")
-            results_file.write('\n'.join(conf_str))
-
-        fp32_baseline_conf = None
-        baseline_total_time = baseline_total_energy = 0 
-
-        def get_baseline_times_energies(conf):
-            curr_time = curr_energy = 0
-            for hardware, layer in conf[1]:
-                for op_time, op_energy, tensor_op in layer:
-                    curr_time += op_time
-                    curr_energy += op_energy
-            return curr_time, curr_energy
-
-        def get_final_times_energies_conf(curr_conf, curr_conf_name):
-            final_time = final_energy = 0
-
-            final_conf = [] # List (conf) of lists (layers) of tuples (operation data)
-
-            #for hardware, layer in self.fp16_baseline:
-                #print(hardware, layer)
-            for layer_ind, (hardware, layer) in enumerate(curr_conf[1]):
-                final_conf_layer = []
-
-                for tensor_ind, (op_time, op_energy, tensor_op) in enumerate(layer):
-                    if tensor_op.find("softmax") != -1:
-                        final_conf_layer.append((None, None, tensor_op))
-                        continue
-                    # layer name, operation name, val name
-                    if tensor_op.find("promise") != -1: # compute sum of entire fp16 baseline layer
-                        baseline_time = 0
-                        baseline_energy = 0
-                        baseline_op = []
-
-                        if tensor_op.find("fp32") != -1:
-                            assert False
-                            baseline_layer = fp32_baseline_conf[1][layer_ind][1]
-                        else:
-                            baseline_layer = self.fp16_baseline[layer_ind][1]
-
-                        for op_time, op_energy, tensor_op in baseline_layer:
-                            baseline_time += op_time
-                            baseline_energy += op_energy
-                            baseline_op.append(tensor_op)
-                    else: # look at the individual tensor operation as before
-                        if tensor_op.find("fp32") != -1:
-                            assert False
-                            baseline_layer = fp32_baseline_conf[1][layer_ind][1]
-                        else:
-                            baseline_layer = self.fp16_baseline[layer_ind][1]
-                        baseline_time = baseline_layer[tensor_ind][0]
-                        baseline_energy = baseline_layer[tensor_ind][1]
-                        baseline_op = baseline_layer[tensor_ind][2]
-
-                    final_tensor_op = tensor_op
-                    if op_time > baseline_time:
-                        print("**************** BIGGER ******************")
-                        print(curr_conf_name)
-                        print(baseline_time, baseline_energy, baseline_op, layer_ind)
-                        print(op_time, tensor_op, layer_ind)
-                        final_time += baseline_time
-                        final_energy += baseline_energy
-                        final_tensor_op = baseline_op
-                    else:
-                        print("**************** SMALLER ******************")
-                        print(curr_conf_name)
-                        print(baseline_time, baseline_energy, baseline_op, layer_ind)
-                        print(op_time, tensor_op, layer_ind)
-                        final_time += op_time
-                        final_energy += op_energy
-
-                    final_conf_layer.append((None, None, final_tensor_op)) # Don't care about the times and energies when writing
-                final_conf.append((hardware, final_conf_layer))
-            #print("\n")
-            return final_time, final_energy, (curr_conf[0], final_conf) 
-
-        conf_index = 0
-        print("RESULTS")
-        for line in config_file:
-            if line.startswith("conf"):
-                orig_line_lst = line.split(' ')
-                conf_name = orig_line_lst[0]
-
-                if not fp32_baseline_conf:
-                    fp32_baseline_conf = self.__conf_results[conf_index] #conf_name]
-                    baseline_total_time, baseline_total_energy = get_baseline_times_energies(fp32_baseline_conf)
-                    results_file.write("%s\n" % repr(baseline_total_time))
-                    write_conf_to_file(conf_name, fp32_baseline_conf, 1, 1)
-                else:
-                    curr_conf = self.__conf_results[conf_index] #conf_name]
-                    final_time, final_energy, curr_conf = get_final_times_energies_conf(curr_conf, conf_name)
-                    print("Baseline time: %f, final time: %f, baseline energy: %f, final energy: %f, ratios: %f %f" % (baseline_total_time, final_time, baseline_total_energy, final_energy, baseline_total_time / final_time, baseline_total_energy / final_energy))
-                    write_conf_to_file(conf_name, curr_conf, baseline_total_time / final_time, baseline_total_energy / final_energy) 
-                conf_index += 1
-        results_file.close()
-        config_file.close()
-
-if __name__ == "__main__":
-    if len(sys.argv) != 5:
-        print("Usage: python driver.py <layer info> <tensor info> <configurations> <results file>")
-        exit(1)
-    Driver(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]).driver()
diff --git a/llvm/projects/soc_simulator/src/driver_new_config_no_fp16_repl.py b/llvm/projects/soc_simulator/src/driver_new_config_no_fp16_repl.py
deleted file mode 100644
index d12477fd77533f94ff067e05771459ff4c830bb8..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/driver_new_config_no_fp16_repl.py
+++ /dev/null
@@ -1,464 +0,0 @@
-from collections import defaultdict
-import os
-import subprocess
-import sys
-
-class Driver:
-    fp16_swing = 8
-
-    class PrecisionTypes:
-        FP16 = 0
-        FP32 = 1
-        PROMISE = 2
-
-    class ApproxTypes:
-        PERF = 3 
-        SAMP = 4
-
-    results_time_key = "Time"
-    results_energy_key = "Energy"
-
-
-    def __init__(self, layer_filename, table_filename, config_filename, results_filename):
-        self.__layer_filename = layer_filename
-        self.__table_filename = table_filename
-        self.__config_filename = config_filename
-        self.__results_filename = results_filename
-
-        # NOTE: Use an OrderedDict if we want to search by operation name 
-        # Using a list bc we care about the order the data is read in
-        # since it corresponds to the data in the configuration file
-        self.__tensor_layers = []
-
-        # [layer_name][operation_name][cols] 
-        # Operation names need to be stored in order of insertion 
-        self.__tensor_table = defaultdict(list)
-
-        self.__conf_results = [] # indexed 
-        #self.__conf_results = {} # {conf name: (first line, [[layer value if promise], [tensor vals if gpu]])}
-
-
-    @staticmethod
-    def is_conv(operation_name):
-        return operation_name.startswith("Conv")
-
-
-    @staticmethod
-    def is_nml(operation_name):
-        return operation_name.startswith("NML")
-
-
-    @staticmethod
-    def is_fc(operation_name):
-        return operation_name.startswith("FC")
-
-
-    # FOR DEBUGGING ONLY
-    def __get_str(self, appr):
-        if appr == Driver.PrecisionTypes.FP16:
-            return "FP16"
-        elif appr == Driver.PrecisionTypes.FP32:
-            return "FP32"
-        elif appr == Driver.PrecisionTypes.PROMISE:
-            return "PROMISE"
-        elif appr == Driver.ApproxTypes.PERF:
-            return "PERF"
-        elif appr == Driver.ApproxTypes.SAMP:
-            return "SAMP"
-
-
-    def driver(self):
-        self.__parse_tensor_layer_file()
-        self.__parse_tensor_table()
-        self.__run_simulations()
-        self.__write_output()
-
-
-    def __parse_tensor_layer_file(self): 
-        if not os.path.isfile(self.__layer_filename):
-            print("ERROR: %s was not found." % self.__layer_filename)
-            exit(1)
-        layer_file = open(self.__layer_filename, "r")
-        for line in layer_file:
-            layer_data = line.strip().split(',')
-            layer_name = layer_data[0]
-
-            tensor_layer = defaultdict(str)
-            tensor_layer["Name"] = layer_name
-
-            if Driver.is_conv(layer_name):
-                tensor_layer["N"] = float(layer_data[1])
-                tensor_layer["Cin"] = float(layer_data[2])
-                tensor_layer["H"] = float(layer_data[3])
-                tensor_layer["W"] = float(layer_data[4])
-                tensor_layer["Cout"] = float(layer_data[5])
-                tensor_layer["Kh"] = float(layer_data[7])
-                tensor_layer["Kw"] = float(layer_data[8])
-                tensor_layer["Sh"] = float(layer_data[9])
-                tensor_layer["Sw"] = float(layer_data[10])
-
-            elif Driver.is_fc(layer_name):
-                tensor_layer["RA"] = float(layer_data[1])
-                tensor_layer["CA"] = float(layer_data[2])
-                tensor_layer["RB"] = float(layer_data[3])
-                tensor_layer["CB"] = float(layer_data[4])
-
-            elif not Driver.is_nml(layer_name): # TODO should we store data for NMLs?
-                print("ERROR: Invalid layer name %s" % layer_name)
-                exit(1)
-
-            self.__tensor_layers.append(tensor_layer)
-        layer_file.close()
-
-
-    def __parse_tensor_table(self): 
-        if not os.path.isfile(self.__table_filename):
-            print("ERROR: %s was not found." % self.__table_filename)
-            exit(1)
-        table_file = open(self.__table_filename, "r")
-        line = table_file.readline().strip()
-
-        while line:
-            # Line here MUST be a header or there's a bug 
-            # Get the description of the layer 
-            assert(line.startswith("**"))
-            header_contents = line.split(' ')[1:] 
-            layer_name = header_contents[0]
-            num_ops = int(header_contents[1])
-            col_names = header_contents[2:]
-
-            layer_operations = []
-
-            # Go through all operations in the layer
-            for op_count in range(num_ops):
-                operation_data = defaultdict(str)
-
-                line = table_file.readline().strip()
-                op_data = line.split(' ')
-                op_name = op_data[0]
-                operation_data["Name"] = op_name
-
-                # Number of data items (#s) needs to match up with the # of cols 
-                assert(len(op_data) - 1 == len(col_names))
-
-                # Go through all data items (each col element) per operation 
-                for i in range(len(col_names)):
-                    operation_data[col_names[i]] = float(op_data[i + 1])
-
-                layer_operations.append(operation_data)
-
-            self.__tensor_table[layer_name] = layer_operations
-            line = table_file.readline().strip()
-        table_file.close()
-
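-    # __parse_tensor_table consumes blocks of the following shape (numbers are
-    # illustrative): a "**" header giving the layer name, its operation count,
-    # and the column names, followed by one whitespace-separated row per
-    # tensor operation.
-    #
-    #   ** Conv1 2 fp32_time fp32_energy fp16_time fp16_energy
-    #   Conv1 2.5 410.0 1.2 180.0
-    #   Add1 0.2 33.0 0.1 15.0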
-
-    @staticmethod
-    def is_promise(layer_hardware):
-        return layer_hardware == "promise"
-
-    @staticmethod
-    def is_gpu(layer_hardware):
-        return layer_hardware == "gpu"
-
-    def __run_simulations(self):
-        config_file = open(self.__config_filename, "r")
-        line = config_file.readline().strip()
-
-        while line: 
-            assert(line == "+++++")
-            print("CONFIGURATION")
-           
-            curr_conf_results = []
-
-            prev_layer = Driver.PrecisionTypes.FP32
-            curr_layer = None
-
-            line = config_file.readline().strip()
-            first_line = line
-            conf_name = line.split(' ')[0]
-            print("CONF NAME: %s" % conf_name)
-            assert(conf_name.startswith("conf"))
-            line = config_file.readline().strip()
-
-            while line != "-----":
-                layer_as_lst = line.split(' ')
-                layer_results = []
-                # Skip softmax
-                if line.find("softmax") != -1:
-                    layer_results.append((0, 0, ' '.join(layer_as_lst[2:])))
-                    curr_conf_results.append((layer_as_lst[1], layer_results))
-                    line = config_file.readline().strip()
-                    continue
-                
-                layer_ind = int(layer_as_lst[0]) - 1
-                layer_table_data = self.__tensor_layers[layer_ind]
-                layer_name = layer_table_data["Name"]
-
-                if Driver.is_promise(layer_as_lst[1]):
-                    print("Running layer %s on PROMISE" % layer_name)
-                    curr_layer = Driver.PrecisionTypes.PROMISE
-
-                    total_time = 0
-                    total_energy = 0
-
-                    # To support multiple sets of <param> <number> in the future
-                    for i in range(2, len(layer_as_lst), 2):
-                        param_name = layer_as_lst[i] # Use when there's more than 1 type of param 
-                        param_val = int(layer_as_lst[i + 1])
-                        time, energy = self.__run_promise_simulation(param_val, layer_table_data)
-                        total_time += time
-                        total_energy += energy
-                    layer_results.append((total_time, total_energy, ' '.join(layer_as_lst[2:])))
-
-                elif Driver.is_gpu(layer_as_lst[1]):
-                    print("Running layer %s on the GPU" % layer_name)
-
-                    tensor_count = 0 
-
-                    # 3 elements per tensor operation 
-                    for i in range(2, len(layer_as_lst), 3):
-                        op_type = layer_as_lst[i]
-                        precision_type = layer_as_lst[i + 1]
-                        op_number = layer_as_lst[i + 2]
-
-                        approx_type = None
-                        if line.find("fp16") != -1:
-                            curr_layer = Driver.PrecisionTypes.FP16
-                        elif line.find("fp32") != -1:
-                            curr_layer = Driver.PrecisionTypes.FP32
-
-                        if precision_type == "perf" or precision_type == "samp": # Handle approx type
-                            if precision_type == "perf": 
-                                approx_type = Driver.ApproxTypes.PERF
-                            elif precision_type == "samp": 
-                                approx_type = Driver.ApproxTypes.SAMP
-                            if line.find("fp16") != -1:
-                                curr_layer = Driver.PrecisionTypes.FP16
-                            elif line.find("fp32") != -1:
-                                curr_layer = Driver.PrecisionTypes.FP32
-                        quant_time, quant_energy = self.__quantize(op_type, precision_type, op_number, curr_layer, prev_layer, \
-                                    tensor_count, layer_table_data)
-                        if quant_time != 0:
-                            assert i == 2 # quantization may only occur at the layer's first tensor op
-                        conv_time, conv_energy = self.__run_gpu_simulation(curr_layer, layer_name, \
-                                    tensor_count, approx_type, op_number) 
-                        print(quant_time, quant_energy, conv_time, conv_energy)
-                        layer_results.append((quant_time + conv_time, quant_energy + conv_energy, ' '.join(layer_as_lst[i : i + 3])))
-                        prev_layer = curr_layer
-                        tensor_count += 1
-
-                line = config_file.readline().strip()
-                prev_layer = curr_layer
-                curr_conf_results.append((layer_as_lst[1], layer_results))
-
-            self.__conf_results.append( (first_line, curr_conf_results) )
-            line = config_file.readline().strip()
-        config_file.close()
-
-
-    def __quantize(self, op_type, precision_type, op_number, curr_layer, prev_layer, h2f_f2h_operation_ind, layer_data):
-        if curr_layer == prev_layer or curr_layer == Driver.PrecisionTypes.PROMISE \
-                    or prev_layer == Driver.PrecisionTypes.PROMISE:
-            return 0.0, 0.0
-        print("IN QUANTIZE") 
-        layer_name = layer_data["Name"]
-
-        # NOTE: Ignoring the curr == PROMISE or prev == PROMISE logic because
-        # smartDMA is always true, so we would already have returned at the top of the method
-
-        # Get h2f/f2h data using the first tensor operation in the layer
-        # (which is why order matters in the tensor table)
-        tensor_op_row = self.__tensor_table[layer_name][h2f_f2h_operation_ind]  
-        time_key = None
-        energy_key = None
-
-        print(precision_type, op_number)
-        if op_number == "1":
-            lookup_key = "_" #lookup_key = precision_type
-        else:
-            lookup_key = "_" + precision_type + str(op_number) + "_"
-
-        print("QUANT LOOKUP KEY", lookup_key)
-        if curr_layer == Driver.PrecisionTypes.FP32:
-            time_key = "h2f%stime" % lookup_key
-            energy_key = "h2f%senergy" % lookup_key
-        elif curr_layer == Driver.PrecisionTypes.FP16:
-            time_key = "f2h%stime" % lookup_key
-            energy_key = "f2h%senergy" % lookup_key
-        print(time_key, energy_key)
-        time = tensor_op_row[time_key]
-        energy = tensor_op_row[energy_key]
-        print("Quantization: (%f, %f)" % (time, energy))
-        return (time, energy)
-
-
-    def __run_promise_simulation(self, swing, layer_data):
-        layer_name = layer_data["Name"] 
-        patch_factor = 1 
-
-        if Driver.is_conv(layer_name): 
-            rows_a = layer_data["N"] * layer_data["H"] * layer_data["W"] \
-                    / (layer_data["Sh"] * layer_data["Sw"])
-            cols_a = layer_data["Cin"] * layer_data["Kh"] * layer_data["Kw"]
-            rows_b = cols_a
-            cols_b = layer_data["Cout"]
-            patch_factor = layer_data["Kh"] * layer_data["Kw"]
-        elif Driver.is_fc(layer_name):
-            rows_a = layer_data["RA"] 
-            cols_a = layer_data["CA"]
-            rows_b = layer_data["RB"]
-            cols_b = layer_data["CB"]
-        else:
-            print("ERROR: PROMISE does not support layer %s" % layer_name)
-            exit(1)
-        # Run promise simulator
-        # TODO need to print time and energy in the ptm runner so we can pipe it
-        output = subprocess.Popen(["./ptm_new", str(rows_a), str(cols_a), str(rows_b), \
-                    str(cols_b), str(patch_factor), str(swing)], \
-                    stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0]
-        total_time_energy = output.strip().split(',')
-
-        assert(len(total_time_energy) == 2)
-        return float(total_time_energy[0]), float(total_time_energy[1])
-
-
-    def __run_gpu_simulation(self, curr_layer, layer_name, tensor_ind, \
-                    approx_type = None, knob_number = None):
-        tensor_info = self.__tensor_table[layer_name][tensor_ind]
-        #print(tensor_info)
-        #print(layer_name)
-        #print(tensor_ind)
-        time_key = None
-        energy_key = None
-
-        if approx_type == Driver.ApproxTypes.PERF or approx_type == Driver.ApproxTypes.SAMP: # fp16_perf2_energy
-            approx_type_str = None
-            if approx_type == Driver.ApproxTypes.PERF:
-                approx_type_str = "perf"
-            elif approx_type == Driver.ApproxTypes.SAMP: 
-                approx_type_str = "samp"
-
-            if curr_layer == Driver.PrecisionTypes.FP32:
-                time_key = "fp32_%s%s_time" % (approx_type_str, knob_number)
-                energy_key = "fp32_%s%s_energy" % (approx_type_str, knob_number)
-
-            elif curr_layer == Driver.PrecisionTypes.FP16:
-                time_key = "fp16_%s%s_time" % (approx_type_str, knob_number)
-                energy_key = "fp16_%s%s_energy" % (approx_type_str, knob_number)
-
-        else: # None for now
-            if curr_layer == Driver.PrecisionTypes.FP32:
-                time_key = "fp32_time"
-                energy_key = "fp32_energy"
-
-            elif curr_layer == Driver.PrecisionTypes.FP16:
-                time_key = "fp16_time"
-                energy_key = "fp16_energy"
-        print(time_key, energy_key)
-        conversion_time = tensor_info[time_key]
-        conversion_energy = tensor_info[energy_key]
-        #print("GPU: (%f, %f)\n" % (conversion_time, conversion_energy))
-        return conversion_time, conversion_energy
-
-
-    def __write_output(self):
-        config_file = open(self.__config_filename, "r")
-        results_file = open(self.__results_filename, "w")
-
-        def write_conf_to_file(conf_name, final_conf, time_speedup, energy_speedup):
-            # conf = ([layer values if promise], [tensor vals if gpu])
-            conf_str = ["+++++"]
-
-            # process the first line
-            first_line, layers = final_conf
-            first_line_lst = first_line.split(' ')
-            assert first_line_lst[0] == conf_name
-            
-            new_header = [conf_name]
-            new_header.append(repr(time_speedup))
-            new_header.append(repr(energy_speedup))
-            new_header.append(repr(abs(float(first_line_lst[-2]))))
-            new_header.append(repr(abs(float(first_line_lst[-1]))))
-            conf_str.append(' '.join(new_header))
-            
-            for ind, (hardware, layer) in enumerate(layers):
-                print(layer)
-                layer_lst = [str(ind + 1)]
-                layer_lst.append(hardware)
-                print(layer_lst)
-                for op_time, op_energy, tensor_op in layer:
-                    layer_lst.append(tensor_op) 
-                conf_str.append(' '.join(layer_lst))
-            conf_str.append("-----\n")
-            results_file.write('\n'.join(conf_str))
-
-        baseline_conf = None
-        baseline_total_time = baseline_total_energy = 0 
-
-        def get_baseline_times_energies(conf):
-            curr_time = curr_energy = 0
-            print("RESULTS: ", conf[1])
-            for hardware, layer in conf[1]:
-                for op_time, op_energy, tensor_op in layer:
-                    curr_time += op_time
-                    curr_energy += op_energy
-            return curr_time, curr_energy
-
-        def get_final_times_energies_conf(curr_conf):
-            final_time = final_energy = 0
-           
-            final_conf = [] # List (conf) of lists (layers) of tuples (operation data)
-
-            for layer_ind, (hardware, layer) in enumerate(curr_conf[1]):
-                final_conf_layer = []
-
-                for tensor_ind, (op_time, op_energy, tensor_op) in enumerate(layer):
-                    baseline_time, baseline_energy, baseline_op = baseline_conf[1][layer_ind][tensor_ind]
-                    final_tensor_op = tensor_op
-                    if op_time > baseline_time:
-                        print("**************** BIGGER ******************")
-                        final_time += baseline_time
-                        final_energy += baseline_energy
-                        final_tensor_op = baseline_op
-                    else:
-                        final_time += op_time
-                        final_energy += op_energy
-                    '''
-                    # Ignoring bigger energies for now  
-                    if op_energy > baseline_energy:
-                        final_time += baseline_energy
-                        final_energy += baseline_energy
-                        final_tensor_op = baseline_op
-                    else:
-                        final_time += op_time
-                        final_energy += op_energy
-                    '''
-                    final_conf_layer.append((None, None, final_tensor_op)) # Don't care about the times and energies when writing
-                final_conf.append((hardware, final_conf_layer))
-            return final_time, final_energy, (curr_conf[0], final_conf) 
-
-        conf_index = 0
-        print("RESULTS")
-        for line in config_file:
-            if line.startswith("conf"):
-                orig_line_lst = line.split(' ')
-                conf_name = orig_line_lst[0]
-
-                if not baseline_conf:
-                    baseline_conf = self.__conf_results[conf_index] #conf_name]
-                    baseline_total_time, baseline_total_energy = get_baseline_times_energies(baseline_conf)
-                    results_file.write("%s\n" % repr(baseline_total_time))
-                    write_conf_to_file(conf_name, baseline_conf, 1, 1)
-                else:
-                    curr_conf = self.__conf_results[conf_index] #conf_name]
-                    final_time, final_energy = get_baseline_times_energies(curr_conf)
-                    write_conf_to_file(conf_name, curr_conf, baseline_total_time / final_time, baseline_total_energy / final_energy) 
-                conf_index += 1
-        results_file.close()
-        config_file.close()
-
-if __name__ == "__main__":
-    if len(sys.argv) != 5:
-        print("Usage: python driver.py <layer info> <tensor info> <configurations> <results file>")
-        exit(1)
-    Driver(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]).driver()
diff --git a/llvm/projects/soc_simulator/src/ext_test b/llvm/projects/soc_simulator/src/ext_test
deleted file mode 100755
index 5c120650ec8efd65d57d15ea93092c80463b6e28..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/ext_test and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/fp16_emu.cpp b/llvm/projects/soc_simulator/src/fp16_emu.cpp
deleted file mode 100644
index 81a541f31f7ce97d992181448c0174fbf032f7cb..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/fp16_emu.cpp
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright 1993-2014 NVIDIA Corporation.  All rights reserved.
- *
- * NOTICE TO LICENSEE:
- *
- * This source code and/or documentation ("Licensed Deliverables") are
- * subject to NVIDIA intellectual property rights under U.S. and
- * international Copyright laws.
- *
- * These Licensed Deliverables contained herein is PROPRIETARY and
- * CONFIDENTIAL to NVIDIA and is being provided under the terms and
- * conditions of a form of NVIDIA software license agreement by and
- * between NVIDIA and Licensee ("License Agreement") or electronically
- * accepted by Licensee.  Notwithstanding any terms or conditions to
- * the contrary in the License Agreement, reproduction or disclosure
- * of the Licensed Deliverables to any third party without the express
- * written consent of NVIDIA is prohibited.
- *
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
- * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE.  IT IS
- * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
- * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
- * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
- * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
- * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
- * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
- * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
- * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
- * OF THESE LICENSED DELIVERABLES.
- *
- * U.S. Government End Users.  These Licensed Deliverables are a
- * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
- * 1995), consisting of "commercial computer software" and "commercial
- * computer software documentation" as such terms are used in 48
- * C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government
- * only as a commercial end item.  Consistent with 48 C.F.R.12.212 and
- * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
- * U.S. Government End Users acquire the Licensed Deliverables with
- * only those rights set forth herein.
- *
- * Any use of the Licensed Deliverables in individual and commercial
- * software must include, in the user documentation and internal
- * comments to the code, the above Disclaimer and U.S. Government End
- * Users Notice.
- */
- 
-#include "fp16_emu.h" 
-
-#define STATIC_ASSERT(cond) do { typedef char compile_time_assert[(cond) ? 1 : -1]; } while (0)
-
-// Host functions for converting between FP32 and FP16 formats
-// Paulius Micikevicius (pauliusm@nvidia.com)
-
-half1 cpu_float2half_rn(float f)
-{
-    unsigned x = *((int*)(void*)(&f));
-    unsigned u = (x & 0x7fffffff), remainder, shift, lsb, lsb_s1, lsb_m1;
-    unsigned sign, exponent, mantissa;
-
-    __half_raw hr;
-
-    // Get rid of +NaN/-NaN case first.
-    if (u > 0x7f800000) {
-        hr.x = 0x7fffU;
-        return reinterpret_cast<half1&>(hr);
-    }
-  
-    sign = ((x >> 16) & 0x8000);
-  
-    // Get rid of +Inf/-Inf, +0/-0.
-    if (u > 0x477fefff) {
-        hr.x = sign | 0x7c00U;
-        return reinterpret_cast<half1&>(hr);
-    }
-    if (u < 0x33000001) {
-        hr.x = sign | 0x0000U;
-        return reinterpret_cast<half1&>(hr);
-    }
-
-    exponent = ((u >> 23) & 0xff);
-    mantissa = (u & 0x7fffff);
-
-    if (exponent > 0x70) {
-        shift = 13;
-        exponent -= 0x70;
-    } else {
-        shift = 0x7e - exponent;
-        exponent = 0;
-        mantissa |= 0x800000;
-    }
-    lsb = (1 << shift);
-    lsb_s1 = (lsb >> 1);
-    lsb_m1 = (lsb - 1);
-  
-    // Round to nearest even.
-    remainder = (mantissa & lsb_m1);
-    mantissa >>= shift;
-    if (remainder > lsb_s1 || (remainder == lsb_s1 && (mantissa & 0x1))) {
-        ++mantissa;
-        if (!(mantissa & 0x3ff)) {
-            ++exponent;
-            mantissa = 0;
-        }
-    }  
-
-    hr.x = (sign | (exponent << 10) | mantissa);  
-
-    return reinterpret_cast<half1&>(hr);
-}
-
-
-float cpu_half2float(half1 h)
-{
-    STATIC_ASSERT(sizeof(int) == sizeof(float));
-
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-
-    unsigned sign     = ((hr.x >> 15) & 1);
-    unsigned exponent = ((hr.x >> 10) & 0x1f);
-    unsigned mantissa = ((hr.x & 0x3ff) << 13);
-
-    if (exponent == 0x1f) {  /* NaN or Inf */
-        mantissa = (mantissa ? (sign = 0, 0x7fffff) : 0);
-        exponent = 0xff;
-    } else if (!exponent) {  /* Denorm or Zero */
-        if (mantissa) {
-            unsigned int msb;
-            exponent = 0x71;
-            do {
-                msb = (mantissa & 0x400000);
-                mantissa <<= 1;  /* normalize */
-                --exponent;
-            } while (!msb);
-            mantissa &= 0x7fffff;  /* 1.mantissa is implicit */
-        }
-    } else {
-        exponent += 0x70;
-    }
-
-    int temp = ((sign << 31) | (exponent << 23) | mantissa);
-
-    return reinterpret_cast<float&>(temp);
-}
-
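-// A minimal self-check sketch (added for illustration; not part of the
-// original NVIDIA sources): a float exactly representable in FP16 must
-// round-trip through the two converters above unchanged.
-static inline bool fp16_roundtrip_exact(float f)
-{
-    return cpu_half2float(cpu_float2half_rn(f)) == f;
-}
-// e.g. fp16_roundtrip_exact(1.0f) holds, while 0.1f is rounded to the nearest
-// FP16 value (relative error bounded by HLF_EPSILON for normalized values).
-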
diff --git a/llvm/projects/soc_simulator/src/fp16_emu.h b/llvm/projects/soc_simulator/src/fp16_emu.h
deleted file mode 100644
index 8aef176b92e9d598e117a61098532e2190cf2554..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/fp16_emu.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright 1993-2014 NVIDIA Corporation.  All rights reserved.
- *
- * NOTICE TO LICENSEE:
- *
- * This source code and/or documentation ("Licensed Deliverables") are
- * subject to NVIDIA intellectual property rights under U.S. and
- * international Copyright laws.
- *
- * These Licensed Deliverables contained herein is PROPRIETARY and
- * CONFIDENTIAL to NVIDIA and is being provided under the terms and
- * conditions of a form of NVIDIA software license agreement by and
- * between NVIDIA and Licensee ("License Agreement") or electronically
- * accepted by Licensee.  Notwithstanding any terms or conditions to
- * the contrary in the License Agreement, reproduction or disclosure
- * of the Licensed Deliverables to any third party without the express
- * written consent of NVIDIA is prohibited.
- *
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
- * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE.  IT IS
- * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
- * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
- * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
- * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
- * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
- * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
- * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
- * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
- * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
- * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
- * OF THESE LICENSED DELIVERABLES.
- *
- * U.S. Government End Users.  These Licensed Deliverables are a
- * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
- * 1995), consisting of "commercial computer software" and "commercial
- * computer software documentation" as such terms are used in 48
- * C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government
- * only as a commercial end item.  Consistent with 48 C.F.R.12.212 and
- * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
- * U.S. Government End Users acquire the Licensed Deliverables with
- * only those rights set forth herein.
- *
- * Any use of the Licensed Deliverables in individual and commercial
- * software must include, in the user documentation and internal
- * comments to the code, the above Disclaimer and U.S. Government End
- * Users Notice.
- */
-
-// Conversion from/to 16-bit floating point (half-precision).
-
-#if !defined(_FP16_EMU_H_)
-#define _FP16_EMU_H_
-
-#include <driver_types.h>
-#include <cuda_fp16.h>
-
-// Necessary to ensure visibility of CUDART_VERSION macro
-#include <cuda_runtime_api.h>
-
-// Definition of '__half_raw' was not provided before CUDA 9.0.
-// '__half_raw' is our type where the unsigned 16-bit integer 
-// data member 'x' can be accessed in both CUDA 9.0 and 8.0.
-#if CUDART_VERSION < 9000 
-typedef __half __half_raw;
-#endif
-
-// Internally, in CUDNN we use half1 struct as the FP16 type.
-typedef __half half1;
-
-#define HLF_EPSILON 4.887581E-04
-#define HLF_MIN     6.103516E-05
-#define HLF_MAX     6.550400E+04
-
-half1 cpu_float2half_rn(float f);
-
-float cpu_half2float(half1 h);
-
-static __inline__ __device__ __host__ half1 habs(half1 h)
-{
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    hr.x &= 0x7fffU;
-    return reinterpret_cast<half1&>(hr);
-}
-
-static __inline__ __device__ __host__ half1 hneg(half1 h)
-{
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    hr.x ^= 0x8000U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-static __inline__ __device__ __host__ int ishnan(half1 h)
-{
-    // When input is NaN, exponent is all ones and mantissa is non-zero.
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    return (hr.x & 0x7c00U) == 0x7c00U && (hr.x & 0x03ffU) != 0;
-}
-
-static __inline__ __device__ __host__ int ishinf(half1 h)
-{
-    // When input is +/- inf, exponent is all ones and mantissa is zero.
-    __half_raw hr = reinterpret_cast<__half_raw&>(h);
-    return (hr.x & 0x7c00U) == 0x7c00U && (hr.x & 0x03ffU) == 0;
-}
-
-static __inline__ __device__ __host__ int ishequ(half1 x, half1 y)
-{
-    __half_raw xr = reinterpret_cast<__half_raw&>(x);
-    __half_raw yr = reinterpret_cast<__half_raw&>(y);
-    return ishnan(x) == 0 && ishnan(y) == 0 && xr.x == yr.x;
-}
-
-// Returns 0.0000 in FP16 binary form
-static __inline__ __device__ __host__ half1 hzero()
-{
-    __half_raw hr;
-    hr.x = 0x0000U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Returns 1.0000 in FP16 binary form
-static __inline__ __device__ __host__ half1 hone()
-{
-    __half_raw hr;
-    hr.x = 0x3c00U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Returns quiet NaN, the most significant fraction bit #9 is set
-static __inline__ __device__ __host__ half1 hnan()
-{
-    __half_raw hr;
-    hr.x = 0x7e00U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Largest positive FP16 value, corresponds to 6.5504e+04
-static __inline__ __device__ __host__ half1 hmax()
-{
-    // Exponent all ones except LSB (0x1e), mantissa is all ones (0x3ff)
-    __half_raw hr;
-    hr.x = 0x7bffU;
-    return reinterpret_cast<half1&>(hr);
-}
-
-// Smallest positive (normalized) FP16 value, corresponds to 6.1035e-05
-static __inline__ __device__ __host__ half1 hmin()
-{
-    // Exponent is 0x01 (5 bits), mantissa is all zeros (10 bits)
-    __half_raw hr;
-    hr.x = 0x0400U;
-    return reinterpret_cast<half1&>(hr);
-}
-
-#endif  // _FP16_EMU_H_
-
diff --git a/llvm/projects/soc_simulator/src/gemm b/llvm/projects/soc_simulator/src/gemm
deleted file mode 100755
index 8730e084e099f740ad03cb3457862363579f76fe..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/gemm and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/gemm.cu b/llvm/projects/soc_simulator/src/gemm.cu
deleted file mode 100644
index 040a8dbba973d89f2a6ac30b9f38864f1ba9d42e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/gemm.cu
+++ /dev/null
@@ -1,255 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <string>
-#include <chrono>
-#include <cublas_v2.h>
-#include <cuda_fp16.h>
-#include "fp16_emu.h"
-
-#define NUM_ARGS (5)
-
-inline cudaError_t checkCuda(cudaError_t result) {
-    if (result != cudaSuccess)
-        std::cerr << "CUDA Runtime Error: " << cudaGetErrorString(result) << "\n";
-    return result;
-}
-
-inline cublasStatus_t checkCublas(cublasStatus_t result) {
-    if (result != CUBLAS_STATUS_SUCCESS)
-        std::cerr << "cuBLAS Error: " << result << "\n";
-    return result;
-}
-
-template <typename T>
-inline void printArray(const T * const __restrict__ array,
-                       const unsigned elements) {
-    for (unsigned i = 0; i < elements; i++)
-        std::cout << std::to_string(array[i]) << "\n";
-}
-
-// initialization
-template <typename T>
-__global__ void initKernel(T * const __restrict__ array,
-                           const unsigned elements) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        array[idx] = 1.2;
-}
-
-template <typename T>
-void init(T * const __restrict__ array,
-          const unsigned elements) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    initKernel<<<num_blocks, block_size>>>(array, elements);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// float to half
-__global__ void f2hKernel(const float * const __restrict__ input,
-                          const unsigned elements,
-                          half * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __float2half_rn(input[idx]);
-}
-
-void f2h(const float * const __restrict__ input,
-         const unsigned elements,
-         half * const __restrict__ output) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    f2hKernel<<<num_blocks, block_size>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// half to float
-__global__ void h2fKernel(const half * const __restrict__ input,
-                          const unsigned elements,
-                          float * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __half2float(input[idx]);
-}
-
-void h2f(const half * const __restrict__ input,
-         const unsigned elements,
-         float * const __restrict__ output) {
-    const unsigned block_size = 512;
-    const unsigned num_blocks = (elements + block_size - 1) / block_size;
-    h2fKernel<<<num_blocks, block_size>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-void sgemm(const float * const __restrict__ a,
-           const unsigned num_rows_a,
-           const unsigned num_cols_a,
-           const float * const __restrict__ b,
-           const unsigned num_rows_b,
-           const unsigned num_cols_b,
-           float * const __restrict__ c) {
-    std::chrono::time_point<std::chrono::high_resolution_clock> begin;
-    std::chrono::time_point<std::chrono::high_resolution_clock> end;
-    std::ofstream ofs("profile_data.txt", std::ios::out);
-
-    cublasHandle_t handle;
-    checkCublas(cublasCreate(&handle));
-
-    // Enable Tensor Cores
-    checkCublas(cublasSetMathMode(handle, CUBLAS_TENSOR_OP_MATH));
-
-    const float alpha_ = 1.0;
-    const float beta_  = 0.0;
-    const float *alpha = &alpha_;
-    const float *beta  = &beta_;
-
-    begin = std::chrono::high_resolution_clock::now();
-    checkCublas(cublasGemmEx(handle,
-                             CUBLAS_OP_N,
-                             CUBLAS_OP_N,
-                             // Dimensions
-                             num_rows_a,
-                             num_cols_b,
-                             num_cols_a,
-                             alpha,
-                             // A
-                             a,
-                             CUDA_R_32F,
-                             num_rows_a,
-                             // B
-                             b,
-                             CUDA_R_32F,
-                             num_rows_b,
-                             beta,
-                             // C
-                             c,
-                             CUDA_R_32F,
-                             num_rows_a,
-                             // Compute precision and algorithm
-                             CUDA_R_32F,
-                             CUBLAS_GEMM_DEFAULT_TENSOR_OP));
-    checkCuda(cudaDeviceSynchronize());
-    end = std::chrono::high_resolution_clock::now();
-
-    ofs << "FP32_GEMM" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-    ofs << "FP32_GEMM" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-    ofs.flush();
-    ofs.close();
-}
-
-void hgemm(const float * const __restrict__ af,
-           const unsigned num_rows_a,
-           const unsigned num_cols_a,
-           const float * const __restrict__ bf,
-           const unsigned num_rows_b,
-           const unsigned num_cols_b,
-           float * const __restrict__ cf) {
-    std::chrono::time_point<std::chrono::high_resolution_clock> begin;
-    std::chrono::time_point<std::chrono::high_resolution_clock> end;
-    std::ofstream ofs("profile_data.txt", std::ios::out);
-
-    const unsigned num_elements_a = num_rows_a * num_cols_a;
-    const unsigned num_elements_b = num_rows_b * num_cols_b;
-    const unsigned num_elements_c = num_rows_a * num_cols_b;
-
-    half *a;
-    half *b;
-    half *c;
-
-    checkCuda(cudaMallocManaged(&a, sizeof(half) * num_elements_a));
-    checkCuda(cudaMallocManaged(&b, sizeof(half) * num_elements_b));
-    checkCuda(cudaMallocManaged(&c, sizeof(half) * num_elements_c));
-
-    init(a, num_elements_a);
-    init(b, num_elements_b);
-    init(c, num_elements_c);
-
-    // Convert floats to halfs
-    f2h(af, num_elements_a, a);
-    f2h(bf, num_elements_b, b);
-
-    cublasHandle_t handle;
-    checkCublas(cublasCreate(&handle));
-    checkCublas(cublasSetMathMode(handle, CUBLAS_TENSOR_OP_MATH));
-
-    const half alpha_ = cpu_float2half_rn(1.0);
-    const half beta_  = cpu_float2half_rn(0.0);
-    const half *alpha = &alpha_;
-    const half *beta  = &beta_;
-
-    begin = std::chrono::high_resolution_clock::now();
-    checkCublas(cublasGemmEx(handle,
-                             CUBLAS_OP_N,
-                             CUBLAS_OP_N,
-                             // Dimensions
-                             num_rows_a,
-                             num_cols_b,
-                             num_cols_a,
-                             alpha,
-                             // A
-                             a,
-                             CUDA_R_16F,
-                             num_rows_a,
-                             // B
-                             b,
-                             CUDA_R_16F,
-                             num_rows_b,
-                             beta,
-                             // C
-                             c,
-                             CUDA_R_16F,
-                             num_rows_a,
-                             // Compute precision and algorithm
-                             CUDA_R_16F,
-                             CUBLAS_GEMM_DEFAULT_TENSOR_OP));
-    checkCuda(cudaDeviceSynchronize());
-    end = std::chrono::high_resolution_clock::now();
-
-    h2f(c, num_elements_c, cf);
-
-    ofs << "FP16_GEMM" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-    ofs << "FP16_GEMM" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-    ofs.flush();
-    ofs.close();
-}
-
-int main(int argc, char *argv[]) {
-    if (argc != NUM_ARGS) {
-        std::cerr << "Usage: " << argv[0] << " <RA> <CA> <CB> <precision>\n";
-        exit(1);
-    }
-
-    // Inputs
-    const unsigned num_rows_a = std::atoi(argv[1]);
-    const unsigned num_cols_a = std::atoi(argv[2]);
-    const unsigned num_rows_b = num_cols_a;
-    const unsigned num_cols_b = std::atoi(argv[3]);
-    const std::string precision(argv[4]);
-
-    const unsigned num_elements_a = num_rows_a * num_cols_a;
-    const unsigned num_elements_b = num_rows_b * num_cols_b;
-    const unsigned num_elements_c = num_rows_a * num_cols_b;
-
-    float *a;
-    float *b;
-    float *c;
-
-    checkCuda(cudaMallocManaged(&a, sizeof(float) * num_elements_a));
-    checkCuda(cudaMallocManaged(&b, sizeof(float) * num_elements_b));
-    checkCuda(cudaMallocManaged(&c, sizeof(float) * num_elements_c));
-
-    init(a, num_elements_a);
-    init(b, num_elements_b);
-    init(c, num_elements_c);
-
-    if (precision == "fp32")
-        sgemm(a, num_rows_a, num_cols_a, b, num_rows_b, num_cols_b, c);
-    else
-        hgemm(a, num_rows_a, num_cols_a, b, num_rows_b, num_cols_b, c);
-
-    checkCuda(cudaFree(a));
-    checkCuda(cudaFree(b));
-    checkCuda(cudaFree(c));
-
-    return 0;
-}
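-
-// Example invocation (illustrative sizes): "./gemm 4096 4096 4096 fp16"
-// multiplies a [4096 x 4096] A by a [4096 x 4096] B via the Tensor-Op HGEMM
-// path above and appends begin/end timestamps to profile_data.txt for the
-// external power profiler to bracket.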
diff --git a/llvm/projects/soc_simulator/src/hardware_knobs.pl b/llvm/projects/soc_simulator/src/hardware_knobs.pl
deleted file mode 100755
index fac6b6c748a706cd397040546fa551b72ac14f3c..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/hardware_knobs.pl
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-# Do a GEMM: [5000 x K] x [K x 1024], where K = {256, 512, ..., 32768}
-my $rows_a = 5000;
-my $cols_b = 1024;
-my @K;
-for (my $i = 8; $i <= 15; $i++) {
-    push @K, 2**$i;
-}
-
-# Header
-foreach my $element (@K) {
-    print $element . ",";
-}
-print "\n\n";
-
-print "########## PROMISE 1 #########\n";
-
-# Time
-foreach my $element (@K) {
-    my ($time, $energy) = promise($rows_a, $element, $element, $cols_b, 1);
-    print $time . ",";
-}
-print "\n\n";
-
-# Energy
-foreach my $element (@K) {
-    my ($time, $energy) = promise($rows_a, $element, $element, $cols_b, 1);
-    print $energy . ",";
-}
-print "\n\n";
-
-print "########## PROMISE 7 #########\n";
-
-# Time
-foreach my $element (@K) {
-    my ($time, $energy) = promise($rows_a, $element, $element, $cols_b, 7);
-    print $time . ",";
-}
-print "\n\n";
-
-# Energy
-foreach my $element (@K) {
-    my ($time, $energy) = promise($rows_a, $element, $element, $cols_b, 7);
-    print $energy . ",";
-}
-print "\n\n";
-
-print "########## FP32 #########\n";
-
-# Time
-foreach my $element (@K) {
-    my ($time, $energy) = gpu($rows_a, $element, $element, $cols_b, "fp32");
-    print $time . ",";
-}
-print "\n\n";
-
-# Energy
-foreach my $element (@K) {
-    my ($time, $energy) = gpu($rows_a, $element, $element, $cols_b, "fp32");
-    print $energy . ",";
-}
-print "\n\n";
-
-print "########## FP16 #########\n";
-
-# Time
-foreach my $element (@K) {
-    my ($time, $energy) = gpu($rows_a, $element, $element, $cols_b, "fp16");
-    print $time . ",";
-}
-print "\n\n";
-
-# Energy
-foreach my $element (@K) {
-    my ($time, $energy) = gpu($rows_a, $element, $element, $cols_b, "fp16");
-    print $energy . ",";
-}
-print "\n\n";
-
-# Cleanup
-`rm -f blah profile_data.txt`;
-
-# PROMISE
-sub promise {
-    my ($rows_a, $cols_a, $rows_b, $cols_b, $swing) = @_;
-    my $patch_factor = 1;
-
-    my $te = `./ptm $rows_a $cols_a $rows_b $cols_b $patch_factor $swing`;
-    chomp $te;
-    my @temp = split /,/, $te;
-    return ($temp[0], $temp[1]);
-}
-
-# GPU
-sub gpu {
-    my ($rows_a, $cols_a, $rows_b, $cols_b, $precision) = @_;
-    my $iterations = 10;
-    my $te = `~/awesome_profiler/pp "./gemm $rows_a $cols_a $cols_b $precision" $iterations blah`;
-    chomp $te;
-    my @temp = split /,/, $te;
-    return ($temp[0], $temp[1]);
-}
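-
-# Note: the subs above expect ./ptm and ./gemm in the working directory and
-# the power profiler at ~/awesome_profiler/pp, exactly as invoked.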
diff --git a/llvm/projects/soc_simulator/src/patch b/llvm/projects/soc_simulator/src/patch
deleted file mode 100755
index 94d04d5ba8a88bdd4fb1cbb907f18c419ba248bd..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/patch and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/patch.cu b/llvm/projects/soc_simulator/src/patch.cu
deleted file mode 100644
index 6d489978a24f828cc370cdb6e2add64877c514a7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/patch.cu
+++ /dev/null
@@ -1,292 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <chrono>
-
-// NOTE: This benchmark was originally meant to measure the performance of
-// patch matrix generation on the GPU. However, it is now used for measuring
-// both performance and energy of the actual patch matrix generation that needs
-// to happen before offloading the computation to PROMISE.
-// In order to do so, we assume that initially only that tile of the patch
-// matrix is generated that PROMISE will actually work on, and the generation
-// of the remaining tiles can be pipelined with PROMISE's computation.  This
-// has two implications:
-// 1. We only add the time for the first tile's generation. This means that
-// this program has to be profiled with the appropriate smaller batch size to
-// obtain the *time* overhead.
-// 2. Since pipelining doesn't affect energy, the program has to be profiled
-// with the full batch size to obtain the *energy* overhead.
-
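-// Concrete illustration (assumed sizes, not from the original experiments):
-// for N=32 images with Cin=64, H=W=28 and a 3x3 kernel, the full patch matrix
-// nominally holds (N*H*W) x (Cin*Kh*Kw) = 25088 x 576 elements; the *time*
-// overhead is then profiled with a first-tile batch (e.g. N=4, 3136 rows),
-// while the *energy* overhead is profiled with the full N=32 batch.
-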
-#define NUM_ARGS (9)
-#define BLOCK_SIZE (512)
-
-struct image_dim {
-    unsigned n;
-    unsigned cin;
-    unsigned h;
-    unsigned w;
-};
-
-struct kernel_dim {
-    unsigned cout;
-    unsigned cin;
-    unsigned h;
-    unsigned w;
-};
-
-inline cudaError_t checkCuda(cudaError_t result) {
-    if (result != cudaSuccess)
-        std::cerr << "CUDA Runtime Error: " << cudaGetErrorString(result) << "\n";
-    return result;
-}
-
-// init kernel to bring all the pages to the GPU
-template <typename T>
-__global__ void initKernel(T * const __restrict__ array,
-                           const unsigned elements) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        array[idx] = 1;
-}
-
-template <typename T>
-void init(T * const __restrict__ array,
-          const unsigned elements) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    initKernel<<<num_blocks, BLOCK_SIZE>>>(array, elements);
-    checkCuda(cudaDeviceSynchronize());
-}
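-
-// (cudaMallocManaged memory is populated on first touch, so touching every
-// element on the device up front keeps page-migration cost out of the timed
-// regions below.)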
-
-// Patches input matrix
-template <typename T>
-__global__ void patchInputKernel(const T * const __restrict__ input,
-                                 const image_dim idim,
-                                 const kernel_dim kdim,
-                                 T * const __restrict__ patch_input,
-                                 const unsigned patch_rows,
-                                 const unsigned patch_cols,
-                                 const unsigned patch_n) {
-    // Coalesced writes ftw
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < patch_n) {
-        // Index of output
-        const unsigned patch_col = idx % patch_cols;
-        const unsigned patch_row = idx / patch_cols;
-
-        // Index of the source point in the input image batch
-        const unsigned col_idx = patch_row % idim.w;
-        const unsigned row_idx = patch_row / idim.w;
-
-        // Index of my point in the (kh * kw * cin) kernel block. kw and kh are
-        // flipped because I've assumed the source point lies at the bottom
-        // right of the kernel cube and not the top left.
-        const int kw_idx = kdim.w - (patch_col % kdim.w) - 1;
-        const int kh_idx = kdim.h - ((patch_col / kdim.w) % kdim.h) - 1;
-        const int kc_idx = patch_col / (kdim.w * kdim.h);
-
-        // Index of the "kernel point" in the input image batch
-        const int w_idx = col_idx - kw_idx;
-        const int h_idx = row_idx - kh_idx;
-        const int c_idx = kc_idx;
-
-        // Are we inside the input cube?
-        const bool inside = (w_idx >= 0 && h_idx >= 0);
-        //printf("thread %d: dst (%u, %u); src point (%u, %u); kernel (%d, %d, %d); input (%d, %d, %d); inside %d\n", idx, patch_row, patch_col, row_idx, col_idx, kc_idx, kh_idx, kw_idx, c_idx, h_idx, w_idx, inside);
-
-        if (inside)
-            patch_input[idx] = input[(c_idx * idim.w * idim.h) + (h_idx * idim.w) + w_idx];
-        else
-            patch_input[idx] = 0;
-    }
-}
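-
-// Worked example (hypothetical sizes): with idim.w = 32 and a 3x3x3 kernel
-// (patch_cols = 27), take patch_row = 33, patch_col = 4. The source point is
-// (row 1, col 1); the kernel offsets are kw_idx = kh_idx = 1, kc_idx = 0; so
-// the "kernel point" is (0, 0, 0) and the thread reads
-// input[(0 * 32 * 32) + (0 * 32) + 0].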
-
-template <typename T>
-void patchInput(const T * const __restrict__ input,
-                const image_dim idim,
-                const kernel_dim kdim,
-                T * const __restrict__ patch_input) {
-    const auto patch_rows = (idim.n * idim.h * idim.w) / sizeof(unsigned);
-    const auto patch_cols = kdim.cin * kdim.h * kdim.w;
-    const auto patch_n = patch_rows * patch_cols;
-
-    const unsigned num_blocks = (patch_n + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    patchInputKernel<<<num_blocks, BLOCK_SIZE>>>(input, idim, kdim, patch_input, patch_rows, patch_cols, patch_n);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// Unpatches output matrix
-template <typename T>
-__global__ void unpatchOutputKernel(const T * const __restrict__ patch_output,
-                                    const unsigned patch_rows,
-                                    const unsigned patch_cols,
-                                    const unsigned patch_n,
-                                    T * const __restrict__ output,
-                                    const image_dim odim) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < patch_n) {
-        // Read index in the patch matrix
-        const unsigned image_size = odim.w * odim.h * odim.cin;
-        const unsigned image_id  = idx / image_size;
-        const unsigned patch_row = (image_id * odim.w * odim.h) + (idx % (odim.w * odim.h));
-        const unsigned patch_col = (idx / (odim.w * odim.h)) % odim.cin;
-        //printf("thread %d: src (%u, %u)\n", idx, patch_row, patch_col);
-
-        // Coalesced writes ftw
-        output[idx] = patch_output[(patch_row * patch_cols) + patch_col];
-    }
-}
-
-template <typename T>
-void unpatchOutput(const T * const __restrict__ patch_output,
-                   T * const __restrict__ output,
-                   const image_dim odim) {
-    const auto patch_rows = (odim.n * odim.h * odim.w) / sizeof(unsigned);
-    const auto patch_cols = odim.cin;
-    const auto patch_n = patch_rows * patch_cols;
-
-    const unsigned num_blocks = (patch_n + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    unpatchOutputKernel<<<num_blocks, BLOCK_SIZE>>>(patch_output, patch_rows, patch_cols, patch_n, output, odim);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// Patches kernel matrix
-template <typename T>
-__global__ void patchKernelKernel(const T * const __restrict__ kernel,
-                                  T * const __restrict__ patch_kernel,
-                                  const unsigned patch_rows,
-                                  const unsigned patch_cols,
-                                  const unsigned patch_n) {
-    // There are two ways to implement this: coalesced reads or coalesced
-    // writes. Empirically, coalesced writes is about 3x faster and that's
-    // what's used here. This is most likely because the coalesced reads
-    // version results in different thread blocks writing to the same line
-    // while presumably being on different cores. This results in ping-ponging
-    // of the line, which hurts performance. The strided reads in the coalesced
-    // writes version are just reads - they can be shared as many times as
-    // needed without penalty.
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < patch_n) {
-        const unsigned col_idx = idx % patch_cols;
-        const unsigned row_idx = idx / patch_cols;
-        patch_kernel[idx] = kernel[(col_idx * patch_rows) + row_idx];
-    }
-}
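-
-// For comparison, a hypothetical coalesced-reads variant would walk the
-// *source* matrix contiguously and scatter the writes instead:
-//     const unsigned row = idx % patch_rows;
-//     const unsigned col = idx / patch_rows;
-//     patch_kernel[(row * patch_cols) + col] = kernel[idx];
-// As noted above, that layout measured about 3x slower here.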
-
-template <typename T>
-void patchKernel(const T * const __restrict__ kernel,
-                 const kernel_dim kdim,
-                 T * const __restrict__ patch_kernel) {
-    const auto patch_rows = kdim.cin * kdim.h * kdim.w;
-    const auto patch_cols = kdim.cout;
-    const auto patch_n = patch_rows * patch_cols;
-
-    const unsigned num_blocks = (patch_n + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    patchKernelKernel<<<num_blocks, BLOCK_SIZE>>>(kernel, patch_kernel, patch_rows, patch_cols, patch_n);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-int main(int argc, char *argv[]) {
-    if (argc != NUM_ARGS) {
-        std::cerr << "Usage: " << argv[0] << " <N> <C> <H> <W> <Cout> <Kh> <Kw> <patch/unpatch>\n";
-        exit(1);
-    }
-
-    // Inputs
-    const unsigned n = std::atoi(argv[1]);
-    const unsigned c = std::atoi(argv[2]);
-    const unsigned h = std::atoi(argv[3]);
-    const unsigned w = std::atoi(argv[4]);
-
-    const unsigned cout = std::atoi(argv[5]);
-    const unsigned kh = std::atoi(argv[6]);
-    const unsigned kw = std::atoi(argv[7]);
-
-    const std::string type(argv[8]);
-
-    const image_dim idim = {n, c, h, w};
-    const kernel_dim kdim = {cout, c, kh, kw};
-    const image_dim odim = {n, cout, h, w};
-
-    std::chrono::time_point<std::chrono::high_resolution_clock> begin;
-    std::chrono::time_point<std::chrono::high_resolution_clock> end;
-    std::ofstream ofs("profile_data.txt", std::ios::out);
-
-    if (type == "patch") {
-        const auto patch_rows = (idim.n * idim.h * idim.w) / sizeof(unsigned);
-        const auto patch_cols = kdim.cin * kdim.h * kdim.w;
-        const auto patch_n = patch_rows * patch_cols;
-
-        unsigned *input;
-        unsigned *output;
-
-        checkCuda(cudaMallocManaged(&input, sizeof(unsigned) * patch_n));
-        checkCuda(cudaMallocManaged(&output, sizeof(unsigned) * patch_n));
-
-        init(input, patch_n);
-        init(output, patch_n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        patchInput(input, idim, kdim, output);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(input));
-        checkCuda(cudaFree(output));
-
-        ofs << "Patch_Input" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "Patch_Input" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "unpatch") {
-        const auto patch_rows = (odim.n * odim.h * odim.w) / sizeof(unsigned);
-        const auto patch_cols = odim.cin;
-        const auto patch_n = patch_rows * patch_cols;
-
-        unsigned *input;
-        unsigned *output;
-
-        checkCuda(cudaMallocManaged(&input, sizeof(unsigned) * patch_n));
-        checkCuda(cudaMallocManaged(&output, sizeof(unsigned) * patch_n));
-
-        init(input, patch_n);
-        init(output, patch_n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        unpatchOutput(input, output, odim);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(input));
-        checkCuda(cudaFree(output));
-
-        ofs << "Unpatch_Output" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "Unpatch_Output" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "kernel") {
-        const auto patch_rows = kdim.cin * kdim.h * kdim.w;
-        const auto patch_cols = kdim.cout;
-        const auto patch_n = patch_rows * patch_cols;
-
-        char *input;
-        char *output;
-
-        checkCuda(cudaMallocManaged(&input, sizeof(char) * patch_n));
-        checkCuda(cudaMallocManaged(&output, sizeof(char) * patch_n));
-
-        init(input, patch_n);
-        init(output, patch_n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        patchKernel(input, kdim, output);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(input));
-        checkCuda(cudaFree(output));
-
-        ofs << "Patch_Kernel" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "Patch_Kernel" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else {
-        std::cerr << "Patch type not supported!\n";
-        exit(1);
-    }
-
-    ofs.close();
-    return 0;
-}
diff --git a/llvm/projects/soc_simulator/src/promise_timing_model.cpp b/llvm/projects/soc_simulator/src/promise_timing_model.cpp
deleted file mode 100644
index 87150f45a3a967443a947cf1b95b0e8d6fbae842..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/promise_timing_model.cpp
+++ /dev/null
@@ -1,374 +0,0 @@
-#include "promise_timing_model.h"
-
-// NOTE 1: This code uses seconds for all computations. Using clock cycles
-// would have been cleaner but it's not possible because we don't know anything
-// about Jetson's DRAM other than its bandwidth.
-
-// NOTE 2: All elements are assumed to be 1 byte long.
-
-// NOTE 3: PROMISE's frequency is fixed at 1 GHz and thus 1 cycle = 1 ns.
-
-Dram::Dram(const double latency, const double bandwidth)
-    : latency_(latency), bandwidth_(bandwidth) {}
-
-// Calculates (time, energy) of accessing 'num_bytes' in memory
-std::pair<double, double> Dram::access(const unsigned num_bytes, const bool pipeline) const {
-    const auto time = (pipeline ? 0.0 : latency_) + (static_cast<double>(num_bytes)/bandwidth_);
-    const auto energy = energy_per_bit * static_cast<double>(num_bytes * 8);
-    return std::make_pair(time, energy);
-}
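-
-// Example (illustrative numbers): with latency_ = 100 ns and bandwidth_ =
-// 16 GB/s, a non-pipelined 4 KB access costs 100e-9 + 4096/16e9 = 356 ns,
-// while a pipelined one costs 256 ns. The energy term is independent of
-// pipelining.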
-
-// Calculates the index within the scratchpad array
-unsigned Scratchpad::getIndex(const unsigned address) const {
-    return ((address >> log_line_size) & (num_lines_ - 1));
-}
-
-Scratchpad::Scratchpad(const bool enable,
-           const unsigned size,
-           const double dram_latency,
-           const double dram_bandwidth)
-    : enable_(enable), dram_(dram_latency, dram_bandwidth) {
-
-    num_lines_ = size / line_size;
-    lines_.resize(num_lines_);
-    clear();
-
-#ifdef DEBUG
-    if (enable_) {
-        std::cout << "Initialized " << (size / 1024) << " KB scratchpad "
-                  << "with geometry [" << num_lines_ << " x "
-                  << line_size << "]\n";
-    }
-#endif
-}
-
-// Clears the scratchpad
-void Scratchpad::clear() {
-    for (auto &x : lines_)
-        x = -1;
-}
-
-// Calculates (time, energy) of accessing 'num_bytes' starting from 'address'
-std::pair<double, double> Scratchpad::access(const unsigned address,
-                                 const unsigned num_bytes) {
-    if (!enable_) {
-        const auto load = dram_.access(num_bytes);
-#ifdef DEBUG
-        std::cout << "Accessing " << num_bytes << " bytes from DRAM\n";
-        std::cout << "Took " << std::to_string(load.first * 1e6) << " us and "
-                  << std::to_string(load.second * 1e6) << " uJ\n";
-#endif
-        return load;
-    }
-
-    auto addr = address;
-    int num_bytes_remaining = static_cast<int>(num_bytes);
-
-    double time = 0.0;
-    double energy = 0.0;
-
-    double hits = 0.0;
-    double accesses = 0.0;
-
-#ifdef DEBUG
-    std::cout << "Accessing " << num_bytes << " bytes from the scratchpad, "
-              << "starting at address " << addr << " (index "
-              << getIndex(addr) << ")\n";
-#endif
-
-    // Keep reading line by line until everything is read
-    while (num_bytes_remaining > 0) {
-        if (lines_[getIndex(addr)] == address) {
-            // Hit
-            hits++;
-        } else {
-            // We missed. Load the line from memory. If this is not the
-            // first miss, the accesses can be pipelined (overlapped).
-            const bool first_miss = (hits == accesses);
-            const auto miss = dram_.access(line_size, !first_miss);
-            time += miss.first;
-            energy += miss.second;
-
-            lines_[getIndex(addr)] = address;
-        }
-
-        // This is required in case we began in the middle of a line
-        const auto bytes_accessed = line_size - (addr & (line_size - 1));
-        addr += bytes_accessed;
-        num_bytes_remaining -= bytes_accessed;
-
-        time += line_latency;
-        energy += line_energy;
-        accesses++;
-    }
-
-#ifdef DEBUG
-    std::cout << "Took " << std::to_string(time * 1e6) << " us and "
-              << std::to_string(energy * 1e6) << " uJ\n";
-    std::cout << "Hit rate is " << ((hits * 100.0) / accesses) << "%\n";
-#endif
-    return std::make_pair(time, energy);
-}
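-
-// Example (illustrative, assuming 64 B lines and a line-aligned start): a
-// 256 B access on a cold scratchpad touches 4 lines; the first miss pays the
-// full DRAM latency, the remaining 3 misses are pipelined, and 4 x
-// line_latency / 4 x line_energy are added on top.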
-
-// uint version of min
-unsigned Promise::min(const unsigned x, const unsigned y) const {
-    return static_cast<unsigned>(std::min(x, y));
-}
-
-// Calculates energy of loading data into the SRAM
-double Promise::loadSRAM(const unsigned num_bytes) const {
-    return (sram_energy_per_byte * static_cast<double>(num_bytes));
-}
-
-// Calculates (time, energy) of computing 'num_elements' elements
-std::pair<double, double> Promise::compute(const unsigned num_elements, 
-                                  const unsigned voltage_swing) const {
-    const auto time = (pipeline_latency_ * static_cast<double>(num_elements)) + reduction_latency_;
-    const auto energy = compute_energy_per_dot[voltage_swing] * static_cast<double>(num_elements);
-    return std::make_pair(time, energy);
-}
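-
-// Example (illustrative latencies): with pipeline_latency_ = 1 ns and
-// reduction_latency_ = 100 ns, a 128-element dot product costs
-// 128 * 1 + 100 = 228 ns per bank.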
-
-// Calculates the number of banks required to fill up an entire column; i.e. all the rows
-unsigned Promise::banksPerColumnTile(const unsigned num_rows) const {
-    return static_cast<unsigned>(std::ceil(static_cast<double>(num_rows) / static_cast<double>(bank_x_)));
-}
-
-// Calculates the number of column tiles that can be active at once, capped by the available banks and by the remaining columns
-unsigned Promise::activeColumnTiles(const unsigned num_rows, const unsigned remaining_columns) const {
-    const auto banks_per_column_tile = banksPerColumnTile(num_rows);
-    const auto remaining_column_tiles = static_cast<unsigned>(std::ceil(static_cast<double>(remaining_columns) / static_cast<double>(bank_y_)));
-    auto active_column_tiles = num_banks_ / banks_per_column_tile;
-    active_column_tiles = min(active_column_tiles, remaining_column_tiles);
-    return active_column_tiles;
-}
-
-// Calculates the number of rows of A that can be operated on in parallel
-// based on the tiling of *B*
-unsigned Promise::numRowsA(const unsigned num_rows, const unsigned num_cols) const {
-    const auto banks_per_column_tile = banksPerColumnTile(num_rows);
-    const auto total_column_tiles = static_cast<unsigned>(std::ceil(static_cast<double>(num_cols) / static_cast<double>(bank_y_)));
-    const auto total_required_banks = banks_per_column_tile * total_column_tiles;
-    const auto num_rows_a = num_banks_ < total_required_banks ? 1 : num_banks_ / total_required_banks;
-    return num_rows_a;
-}
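-
-// Worked example (illustrative geometry): with 128 x 128 banks and
-// num_banks_ = 256, a [300 x 200] B gives banksPerColumnTile(300) = 3,
-// ceil(200 / 128) = 2 total column tiles, 3 * 2 = 6 required banks, and
-// numRowsA = 256 / 6 = 42 rows of A in flight at once.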
-
-// Calculates (time, energy) of A x B (GEMM)
-std::pair<double, double> Promise::run(const unsigned num_rows_a,
-                        const unsigned num_cols_a,
-                        const unsigned num_rows_b,
-                        const unsigned num_cols_b, 
-                        const unsigned voltage_swing, 
-                        const unsigned patch_factor) {
-#ifdef DEBUG
-    std::cout << "Performing [" << num_rows_a << " x " << num_cols_a
-              << "] x [" << num_rows_b << " x " << num_cols_b << "] GEMM\n";
-#endif
-    scratch_.clear();
-
-    double compute_time = 0.0;
-    double compute_energy = 0.0;
-
-    double leakage_energy = 0.0;
-
-    double a_time = 0.0;
-    double a_energy = 0.0;
-
-    double b_time = 0.0;
-    double b_energy = 0.0;
-
-    double c_time = 0.0;
-    double c_energy = 0.0;
-
-    double average_bank_utilization = 0.0;
-    double iterations = 0.0;
-
-    // Load a tile of B, compute the corresponding part of C, repeat
-    auto remaining_columns_b = num_cols_b;
-    for (unsigned i = 0; i < num_cols_b;) {
-        // Figure out how B is tiled. In a nutshell, we use as many banks
-        // as will fill up entire columns of B (because we need an entire
-        // column for the reduction to work). The corner cases are where
-        // either #rows or #columns is not divisible by the bank size,
-        // and/or the banks only fill up part of the column. Once the
-        // tiling and #active banks is figured out, we can calculate the
-        // tile size.
-        // Furthermore, if B is sufficiently small, we may be able to
-        // operate on multiple rows of A at the same time.
-        const auto banks_per_column_tile = banksPerColumnTile(num_rows_b);
-        const auto active_column_tiles = activeColumnTiles(num_rows_b, remaining_columns_b);
-        const auto tile_x = min(num_rows_b, banks_per_column_tile * bank_x_);
-        const auto tile_y = min(remaining_columns_b, active_column_tiles * bank_y_);
-        const auto max_parallel_rows_a = min(num_rows_a, numRowsA(num_rows_b, num_cols_b));
-        const auto max_active_banks = banks_per_column_tile * active_column_tiles * max_parallel_rows_a;
-
-        // Load the required tiles of B into the active banks
-        const auto num_bytes = (tile_x * tile_y) / patch_factor;
-        const auto load_b = dram_.access(num_bytes);
-        b_time += load_b.first;
-        b_energy += load_b.second;
-        b_energy += loadSRAM(num_bytes);
-        leakage_energy += (load_b.first * leakage_energy_per_s * max_active_banks);
-
-#ifdef DEBUG
-        std::cout << "\nLoading " << tile_x << " x " << tile_y << " tile of B from DRAM\n";
-        std::cout << "There are " << active_column_tiles << " active column tiles of B "
-                  << "with " << banks_per_column_tile << " PROMISE banks per tile\n";
-#endif
-
-        // Load row(s) of A, compute C, write the result back
-        auto remaining_rows_a = num_rows_a;
-        for (unsigned j = 0; j < num_rows_a; j += max_parallel_rows_a) {
-            const auto active_rows_a = min(remaining_rows_a, max_parallel_rows_a);
-            const auto active_banks = banks_per_column_tile * active_column_tiles * active_rows_a;
-            const auto bank_utilization = (static_cast<double>(active_banks) * 100.0) / static_cast<double>(num_banks_);
-            average_bank_utilization += bank_utilization;
-            iterations++;
-
-#ifdef DEBUG
-            std::cout << "There are a total of " << active_banks << " active banks "
-                      << "operating on " << active_rows_a << " rows of A in parallel\n";
-            std::cout << "Bank utilization is " << bank_utilization << "%\n";
-#endif
-
-            // Load the rows from the scratchpad
-            for (unsigned k = 0; k < active_rows_a; k++) {
-                const auto load_a = scratch_.access((j + k) * num_cols_a, num_cols_a);
-                a_time += load_a.first;
-                a_energy += load_a.second;
-                leakage_energy += (load_a.first * leakage_energy_per_s * active_banks);
-            }
-
-            // All the banks operate in parallel, so use the biggest
-            // computation and count the time only once. Computation
-            // energy is energy per bank x active banks.
-            const auto comp_c = compute(tile_y > bank_y_ ? bank_y_ : tile_y, voltage_swing);
-            compute_time += comp_c.first;
-            compute_energy += (comp_c.second * active_banks);
-
-            // This is sequential, so use tile width and the number of active rows
-            const auto store_c = dram_.access(tile_y * active_rows_a);
-            c_time += store_c.first;
-            c_energy += store_c.second;
-
-            // Leakage is for the entire duration and across all active banks
-            leakage_energy += ((comp_c.first + store_c.first) * leakage_energy_per_s * active_banks);
-
-            remaining_rows_a -= active_rows_a;
-        }
-
-        auto processed_columns_b = active_column_tiles * bank_y_;
-        i += processed_columns_b;
-        remaining_columns_b -= processed_columns_b;
-    }
-
-    const auto memory_time = a_time + b_time + c_time;
-    const auto memory_energy = a_energy + b_energy + c_energy;
-    const auto total_time = compute_time + memory_time;
-    const auto total_energy = compute_energy + memory_energy + leakage_energy;
-
-#ifdef DEBUG
-    std::cout << "------------------------------\n";
-    std::cout << "Compute time:   " << std::to_string(compute_time * 1e3) << " ms\n";
-    std::cout << "Compute energy: " << std::to_string(compute_energy * 1e3) << " mJ\n";
-    std::cout << "Compute power:  " << std::to_string((compute_energy/compute_time) * 1e3) << " mW\n";
-    std::cout << "------------------------------\n";
-
-    std::cout << "Memory time:   " << std::to_string(memory_time * 1e3) << " ms\n";
-    std::cout << "          A:   " << std::to_string(a_time * 1e3) << " ms\n";
-    std::cout << "          B:   " << std::to_string(b_time * 1e3) << " ms\n";
-    std::cout << "          C:   " << std::to_string(c_time * 1e3) << " ms\n";
-    std::cout << "Memory energy: " << std::to_string(memory_energy * 1e3) << " mJ\n";
-    std::cout << "            A: " << std::to_string(a_energy * 1e3) << " mJ\n";
-    std::cout << "            B: " << std::to_string(b_energy * 1e3) << " mJ\n";
-    std::cout << "            C: " << std::to_string(c_energy * 1e3) << " mJ\n";
-    std::cout << "Memory power:  " << std::to_string((memory_energy/memory_time) * 1e3) << " mW\n";
-    std::cout << "------------------------------\n";
-
-    std::cout << "Leakage energy: " << std::to_string(leakage_energy * 1e3) << " mJ\n";
-    std::cout << "Leakage power:  " << std::to_string((leakage_energy/total_time) * 1e3) << " mW\n";
-    std::cout << "------------------------------\n";
-
-    std::cout << "Total time:    " << std::to_string(total_time * 1e3) << " ms\n";
-    std::cout << "Total energy:  " << std::to_string(total_energy * 1e3) << " mJ\n";
-    std::cout << "Average power: " << std::to_string((total_energy/total_time) * 1e3) << " mW\n";
-    std::cout << "------------------------------\n";
-
-    std::cout << "Average bank utilization was " << (average_bank_utilization / iterations) << "%\n";
-    std::cout << "------------------------------\n";
-#endif
-
-    //std::vector<double> result = {total_time, total_energy, compute_time, compute_energy, memory_time, memory_energy, leakage_energy};
-    //return result;
-    return std::make_pair(total_time, total_energy);
-}
-
-Promise::Promise() : 
-        scratch_(use_scratchpad_, scratchpad_size_, dram_latency_, dram_bandwidth_),
-        dram_(dram_latency_, dram_bandwidth_) {
-#ifdef DEBUG
-    std::cout << "Initialized PROMISE with " << num_banks_ << " ["
-              << bank_x_ << " x " << bank_y_ << "] banks\n";
-#endif
-}
-
-// TODO better naming?
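-// Note: when A has more rows than B has columns, the swap below profiles the
-// transposed product B^T x A^T = (A x B)^T instead. The arithmetic is
-// identical, but the smaller dimension becomes the sequential rows-of-A loop
-// while the larger one is spread across the banks.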
-std::pair<double, double> Promise::fc_profile(const unsigned num_rows_a,
-                        const unsigned num_cols_a,
-                        const unsigned num_rows_b,
-                        const unsigned num_cols_b,
-                        const unsigned voltage_swing,
-                        const unsigned patch_factor) {
-    return num_rows_a <= num_cols_b ?
-            run(num_rows_a, num_cols_a, num_rows_b, num_cols_b, voltage_swing, patch_factor) :
-            run(num_cols_b, num_rows_b, num_cols_a, num_rows_a, voltage_swing, patch_factor);
-}
-
-std::pair<double, double> Promise::conv_profile(const unsigned n,
-                        const unsigned c,
-                        const unsigned h,
-                        const unsigned w,
-                        const unsigned c_out,
-                        const unsigned c_in,
-                        const unsigned k_h,
-                        const unsigned k_w,
-                        const unsigned s_h,
-                        const unsigned s_w,
-                        const unsigned voltage_swing,
-                        const unsigned patch_factor) { 
-    unsigned num_rows_a = n * h * w / (s_h * s_w);
-    unsigned num_cols_a = c_in * k_h * k_w;
-    unsigned num_rows_b = num_cols_a;
-    unsigned num_cols_b = c_out;
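-
-    // (im2col view, cf. patch.cu: A is the patched input and B the patched
-    // kernel, so rows(B) must equal cols(A) = c_in * k_h * k_w.)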
-
-    return num_rows_a <= num_cols_b ? 
-            run(num_rows_a, num_cols_a, num_rows_b, num_cols_b, voltage_swing, patch_factor) :
-            run(num_cols_b, num_rows_b, num_cols_a, num_rows_a, voltage_swing, patch_factor);
-}
-
-/*
-int main(int argc, char *argv[]) {
-    if (argc != NUM_ARGS) {
-        std::cout << "Usage: " << argv[0] << " <#rows A> <#cols A> <#rows B> <#cols B> <patch factor> <voltage swing>\n";
-        exit(1);
-    }
-
-    // Inputs
-    const auto num_rows_a = std::atoi(argv[1]);
-    const auto num_cols_a = std::atoi(argv[2]);
-    const auto num_rows_b = std::atoi(argv[3]);
-    const auto num_cols_b = std::atoi(argv[4]);
-    const auto patch_factor = std::atoi(argv[5]);
-    const auto voltage_swing = std::atoi(argv[6]);
-
-    // Make sure the array dimensions make sense and the swing level is valid
-    assert(num_cols_a == num_rows_b);
-    assert(voltage_swing > 0 and voltage_swing <= VOLTAGE_LEVELS);
-
-    Promise promise;
-
-    auto result = promise.fc_profile(num_rows_a, num_cols_a, num_rows_b, num_cols_b, voltage_swing, patch_factor);
-    std::cout << std::to_string(result.first * 1e3) << ","
-              << std::to_string(result.second * 1e3) << std::endl;
-    return 0;
-}
-*/
diff --git a/llvm/projects/soc_simulator/src/promise_timing_model_ext b/llvm/projects/soc_simulator/src/promise_timing_model_ext
deleted file mode 100755
index 960ac445a104c93fd8a5f71c9323f921cf2bc9e4..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/promise_timing_model_ext and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/ptm b/llvm/projects/soc_simulator/src/ptm
deleted file mode 100755
index 0cdb86b8481a64e096766092c375e8ee682e7dac..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/ptm and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/quantization.cu b/llvm/projects/soc_simulator/src/quantization.cu
deleted file mode 100644
index bb1f34b2bdfe869ebd79b67bf05b0776bfbd3d4a..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/quantization.cu
+++ /dev/null
@@ -1,275 +0,0 @@
-#include <iostream>
-#include <fstream>
-#include <chrono>
-#include "cuda_fp16.h"
-
-// NOTE: This benchmark was originally meant to measure the performance of
-// various conversion routines. However, it is now used for measuring both
-// performance and energy of the actual conversion that needs to happen before
-// offloading the computation to PROMISE.
-// In order to do so, we assume that the initial conversion is only performed
-// for the tile that PROMISE will actually work on, and the conversions of the
-// remaining tiles can be pipelined with PROMISE's computation. This has two
-// implications:
-// 1. We only add the time for the conversion of the very first tile. This
-// means that this program has to be profiled with the appropriate smaller
-// batch size to obtain the *time* overhead.
-// 2. Since pipelining doesn't affect energy, the program has to be profiled
-// with the full batch size to obtain the *energy* overhead.
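-//
-// For example (hypothetical element counts):
-//   ./quantize 4096  f2h   # first-tile size  -> *time* overhead
-//   ./quantize 65536 f2h   # full batch size  -> *energy* overhead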
-
-#define NUM_ARGS (3)
-#define BLOCK_SIZE (512)
-
-inline cudaError_t checkCuda(cudaError_t result) {
-    if (result != cudaSuccess)
-        std::cerr << "CUDA Runtime Error: " << cudaGetErrorString(result) << "\n";
-    return result;
-}
-
-// init kernel to bring all the pages to the GPU
-template <typename T>
-__global__ void initKernel(T * const __restrict__ array,
-                           const unsigned elements) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        array[idx] = 1;
-}
-
-template <typename T>
-void init(T * const __restrict__ array,
-          const unsigned elements) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    initKernel<<<num_blocks, BLOCK_SIZE>>>(array, elements);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// float to half
-__global__ void f2hKernel(const float * const __restrict__ input,
-                          const unsigned elements,
-                          half * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __float2half_rn(input[idx]);
-}
-
-void f2h(const float * const __restrict__ input,
-         const unsigned elements,
-         half * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    f2hKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// half to float
-__global__ void h2fKernel(const half * const __restrict__ input,
-                          const unsigned elements,
-                          float * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __half2float(input[idx]);
-}
-
-void h2f(const half * const __restrict__ input,
-         const unsigned elements,
-         float * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    h2fKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// float to char ([-1.0, 1.0] to [-128, 127])
-__global__ void f2cKernel(const float * const __restrict__ input,
-                          const unsigned elements,
-                          char * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = (char) ((127.5 * input[idx]) - 0.5);
-}
-
-void f2c(const float * const __restrict__ input,
-         const unsigned elements,
-         char * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    f2cKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// char to float ([-128, 127] to [-1.0, 1.0])
-__global__ void c2fKernel(const char * const __restrict__ input,
-                          const unsigned elements,
-                          float * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = (((float) input[idx]) + 0.5) / 127.5;
-}
-
-void c2f(const char * const __restrict__ input,
-         const unsigned elements,
-         float * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    c2fKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
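-
-// Example: f2c maps 1.0 -> (char)(127.5 - 0.5) = 127 and -1.0 -> -128, while
-// c2f maps 127 -> 127.5 / 127.5 = 1.0 and -128 -> -127.5 / 127.5 = -1.0, so
-// the endpoints of [-1.0, 1.0] round-trip exactly.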
-
-// half to char ([-1.0, 1.0] to [-128, 127])
-__global__ void h2cKernel(const half * const __restrict__ input,
-                          const unsigned elements,
-                          char * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = (char) ((127.5 * __half2float(input[idx])) - 0.5);
-}
-
-void h2c(const half * const __restrict__ input,
-         const unsigned elements,
-         char * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    h2cKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-// char to half ([-128, 127] to [-1.0, 1.0])
-__global__ void c2hKernel(const char * const __restrict__ input,
-                          const unsigned elements,
-                          half * const __restrict__ output) {
-    const unsigned idx = blockIdx.x * blockDim.x + threadIdx.x;
-    if (idx < elements)
-        output[idx] = __float2half_rn((((float) input[idx]) + 0.5) / 127.5);
-}
-
-void c2h(const char * const __restrict__ input,
-         const unsigned elements,
-         half * const __restrict__ output) {
-    const unsigned num_blocks = (elements + BLOCK_SIZE - 1) / BLOCK_SIZE;
-    c2hKernel<<<num_blocks, BLOCK_SIZE>>>(input, elements, output);
-    checkCuda(cudaDeviceSynchronize());
-}
-
-int main(int argc, char *argv[]) {
-    if (argc != NUM_ARGS) {
-        std::cerr << "Usage: " << argv[0] << " <#elements> <conversion type>\n";
-        exit(1);
-    }
-
-    float *floats;
-    half *halfs;
-    char *chars;
-
-    // Inputs
-    const unsigned n = std::atoi(argv[1]);
-    const std::string type(argv[2]);
-
-    std::chrono::time_point<std::chrono::high_resolution_clock> begin;
-    std::chrono::time_point<std::chrono::high_resolution_clock> end;
-    std::ofstream ofs("profile_data.txt", std::ios::out);
-
-    if (type == "f2h") {
-        checkCuda(cudaMallocManaged(&floats, sizeof(float) * n));
-        checkCuda(cudaMallocManaged(&halfs, sizeof(half) * n));
-
-        init(floats, n);
-        init(halfs, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        f2h(floats, n, halfs);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(floats));
-        checkCuda(cudaFree(halfs));
-
-        ofs << "f2h" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "f2h" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "h2f") {
-        checkCuda(cudaMallocManaged(&floats, sizeof(float) * n));
-        checkCuda(cudaMallocManaged(&halfs, sizeof(half) * n));
-
-        init(floats, n);
-        init(halfs, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        h2f(halfs, n, floats);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(floats));
-        checkCuda(cudaFree(halfs));
-
-        ofs << "h2f" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "h2f" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "f2c") {
-        checkCuda(cudaMallocManaged(&floats, sizeof(float) * n));
-        checkCuda(cudaMallocManaged(&chars, sizeof(char) * n));
-
-        init(floats, n);
-        init(chars, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        f2c(floats, n, chars);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(floats));
-        checkCuda(cudaFree(chars));
-
-        ofs << "f2c" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "f2c" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "c2f") {
-        checkCuda(cudaMallocManaged(&floats, sizeof(float) * n));
-        checkCuda(cudaMallocManaged(&chars, sizeof(char) * n));
-
-        init(floats, n);
-        init(chars, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        c2f(chars, n, floats);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(floats));
-        checkCuda(cudaFree(chars));
-
-        ofs << "c2f" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "c2f" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "h2c") {
-        checkCuda(cudaMallocManaged(&halfs, sizeof(half) * n));
-        checkCuda(cudaMallocManaged(&chars, sizeof(char) * n));
-
-        init(halfs, n);
-        init(chars, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        h2c(halfs, n, chars);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(halfs));
-        checkCuda(cudaFree(chars));
-
-        ofs << "h2c" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "h2c" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else if (type == "c2h") {
-        checkCuda(cudaMallocManaged(&halfs, sizeof(half) * n));
-        checkCuda(cudaMallocManaged(&chars, sizeof(char) * n));
-
-        init(halfs, n);
-        init(chars, n);
-
-        begin = std::chrono::high_resolution_clock::now();
-        c2h(chars, n, halfs);
-        end = std::chrono::high_resolution_clock::now();
-
-        checkCuda(cudaFree(halfs));
-        checkCuda(cudaFree(chars));
-
-        ofs << "c2h" << "\t" << std::to_string(std::chrono::duration<double>(begin.time_since_epoch()).count()) << "\n";
-        ofs << "c2h" << "\t" << std::to_string(std::chrono::duration<double>(end.time_since_epoch()).count()) << "\n";
-        ofs.flush();
-    } else {
-        std::cerr << "Conversion type not supported!\n";
-        exit(1);
-    }
-
-    ofs.close();
-    return 0;
-}
diff --git a/llvm/projects/soc_simulator/src/quantize b/llvm/projects/soc_simulator/src/quantize
deleted file mode 100755
index d977c364e97cae9352e968a2469303cfdba8a650..0000000000000000000000000000000000000000
Binary files a/llvm/projects/soc_simulator/src/quantize and /dev/null differ
diff --git a/llvm/projects/soc_simulator/src/run_configs.pl b/llvm/projects/soc_simulator/src/run_configs.pl
deleted file mode 100755
index ad67ea75ee652f6e504d27135a5898a4503cae26..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/run_configs.pl
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-
-my $smart_dma = 1;
-my $detailed_results = 0;
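-
-# driver.pl argument order, as used by every invocation below:
-#   ./driver.pl <layers.txt> <tensors.txt> <confs.txt> <results.csv> \
-#               <smart_dma> <detailed_results>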
-
-# Full experiments
-my @mnist_networks = ("lenet", "fc4");
-my @cifar10_networks = ("alexnet", "alexnet2", "resnet18", "vgg16");
-my @cifar100_networks = ("vgg16");
-my @pipelines = ("pipeline_GEMO", "pipeline_GEO", "pipeline_GEOM", "pipeline_GSM", "pipeline_GSME");
-
-# Naive experiments
-my @mnist_networks_naive = ("lenet", "fc4");
-my @cifar10_networks_naive = ("vgg16");
-
-#print "############### NAIVE 1% ##############\n";
-
-# MNIST networks
-foreach my $network (@mnist_networks_naive) {
-    print "Running $network\n";
-    print "Naive Loss1\n";
-    `./driver.pl ../${network}_mnist/${network}_layers.txt ../${network}_mnist/${network}_tensors.txt ../${network}_mnist/${network}_naive1.txt ../${network}_mnist/${network}_naive_results1.csv $smart_dma $detailed_results`;
-}
-
-# CIFAR10 networks
-foreach my $network (@cifar10_networks_naive) {
-    print "Running $network\n";
-    print "Naive Loss1\n";
-    `./driver.pl ../${network}_cifar10/${network}_layers.txt ../${network}_cifar10/${network}_tensors.txt ../${network}_cifar10/${network}_naive1.txt ../${network}_cifar10/${network}_naive_results1.csv $smart_dma $detailed_results`;
-}
-
-#print "############### 1% ##############\n";
-#
-## MNIST networks
-#foreach my $network (@mnist_networks) {
-#    print "Running $network\n";
-#    print "HA Loss1\n";
-#    `./driver.pl ../${network}_mnist/${network}_layers.txt ../${network}_mnist/${network}_tensors.txt ../${network}_mnist/${network}_confs1.txt ../${network}_mnist/${network}_results1.csv $smart_dma $detailed_results`;
-#    print "HS Loss1\n";
-#    `./driver.pl ../${network}_mnist/${network}_layers.txt ../${network}_mnist/${network}_tensors.txt ../${network}_mnist/${network}_promise_confs1.txt ../${network}_mnist/${network}_promise_results1.csv $smart_dma $detailed_results`;
-#}
-#
-## CIFAR10 networks
-#foreach my $network (@cifar10_networks) {
-#    print "Running $network\n";
-#    print "HA Loss1\n";
-#    `./driver.pl ../${network}_cifar10/${network}_layers.txt ../${network}_cifar10/${network}_tensors.txt ../${network}_cifar10/${network}_confs1.txt ../${network}_cifar10/${network}_results1.csv $smart_dma $detailed_results`;
-#    print "HS Loss1\n";
-#    `./driver.pl ../${network}_cifar10/${network}_layers.txt ../${network}_cifar10/${network}_tensors.txt ../${network}_cifar10/${network}_promise_confs1.txt ../${network}_cifar10/${network}_promise_results1.csv $smart_dma $detailed_results`;
-#}
-#
-## CIFAR100 networks
-#foreach my $network (@cifar100_networks) {
-#    print "Running $network\n";
-#    print "HA Loss1\n";
-#    `./driver.pl ../${network}_cifar100/${network}_layers.txt ../${network}_cifar100/${network}_tensors.txt ../${network}_cifar100/${network}_confs1.txt ../${network}_cifar100/${network}_results1.csv $smart_dma $detailed_results`;
-#    print "HS Loss1\n";
-#    `./driver.pl ../${network}_cifar100/${network}_layers.txt ../${network}_cifar100/${network}_tensors.txt ../${network}_cifar100/${network}_promise_confs1.txt ../${network}_cifar100/${network}_promise_results1.csv $smart_dma $detailed_results`;
-#}
-#
-## Image pipelines
-#foreach my $pipeline (@pipelines) {
-#    print "Running $pipeline\n";
-#    print "HA Loss1\n";
-#    `./driver.pl ../${pipeline}/${pipeline}_layers.txt ../${pipeline}/${pipeline}_tensors.txt ../${pipeline}/${pipeline}_confs1.txt ../${pipeline}/${pipeline}_results1.csv $smart_dma $detailed_results`;
-#    print "HS Loss1\n";
-#    `./driver.pl ../${pipeline}/${pipeline}_layers.txt ../${pipeline}/${pipeline}_tensors.txt ../${pipeline}/${pipeline}_promise_confs1.txt ../${pipeline}/${pipeline}_promise_results1.csv $smart_dma $detailed_results`;
-#}
-#
-## vgg16_cifar100_top5
-##print "Running vgg16_cifar100_top5\n";
-##`./driver.pl ../vgg16_cifar100_top5/vgg16_layers.txt ../vgg16_cifar100_top5/vgg16_tensors.txt ../vgg16_cifar100_top5/vgg16_confs1.txt ../vgg16_cifar100_top5/vgg16_results1.csv $smart_dma $detailed_results`;
-##`./driver.pl ../vgg16_cifar100_top5/vgg16_layers.txt ../vgg16_cifar100_top5/vgg16_tensors.txt ../vgg16_cifar100_top5/vgg16_promise_confs1.txt ../vgg16_cifar100_top5/vgg16_promise_results1.csv $smart_dma $detailed_results`;
-#
-#print "############### 2% ##############\n";
-#
-## MNIST networks
-#foreach my $network (@mnist_networks) {
-#    print "Running $network\n";
-#    print "HA Loss2\n";
-#    `./driver.pl ../${network}_mnist/${network}_layers.txt ../${network}_mnist/${network}_tensors.txt ../${network}_mnist/${network}_confs2.txt ../${network}_mnist/${network}_results2.csv $smart_dma $detailed_results`;
-#    print "HS Loss2\n";
-#    `./driver.pl ../${network}_mnist/${network}_layers.txt ../${network}_mnist/${network}_tensors.txt ../${network}_mnist/${network}_promise_confs2.txt ../${network}_mnist/${network}_promise_results2.csv $smart_dma $detailed_results`;
-#}
-#
-## CIFAR10 networks
-#foreach my $network (@cifar10_networks) {
-#    print "Running $network\n";
-#    print "HA Loss2\n";
-#    `./driver.pl ../${network}_cifar10/${network}_layers.txt ../${network}_cifar10/${network}_tensors.txt ../${network}_cifar10/${network}_confs2.txt ../${network}_cifar10/${network}_results2.csv $smart_dma $detailed_results`;
-#    print "HS Loss2\n";
-#    `./driver.pl ../${network}_cifar10/${network}_layers.txt ../${network}_cifar10/${network}_tensors.txt ../${network}_cifar10/${network}_promise_confs2.txt ../${network}_cifar10/${network}_promise_results2.csv $smart_dma $detailed_results`;
-#}
-#
-## CIFAR100 networks
-#foreach my $network (@cifar100_networks) {
-#    print "Running $network\n";
-#    print "HA Loss2\n";
-#    `./driver.pl ../${network}_cifar100/${network}_layers.txt ../${network}_cifar100/${network}_tensors.txt ../${network}_cifar100/${network}_confs2.txt ../${network}_cifar100/${network}_results2.csv $smart_dma $detailed_results`;
-#    print "HS Loss2\n";
-#    `./driver.pl ../${network}_cifar100/${network}_layers.txt ../${network}_cifar100/${network}_tensors.txt ../${network}_cifar100/${network}_promise_confs2.txt ../${network}_cifar100/${network}_promise_results2.csv $smart_dma $detailed_results`;
-#}
-#
-## Image pipelines
-#foreach my $pipeline (@pipelines) {
-#    print "Running $pipeline\n";
-#    print "HA Loss2\n";
-#    `./driver.pl ../${pipeline}/${pipeline}_layers.txt ../${pipeline}/${pipeline}_tensors.txt ../${pipeline}/${pipeline}_confs2.txt ../${pipeline}/${pipeline}_results2.csv $smart_dma $detailed_results`;
-#    print "HS Loss2\n";
-#    `./driver.pl ../${pipeline}/${pipeline}_layers.txt ../${pipeline}/${pipeline}_tensors.txt ../${pipeline}/${pipeline}_promise_confs2.txt ../${pipeline}/${pipeline}_promise_results2.csv $smart_dma $detailed_results`;
-#}
-#
-## vgg16_cifar100_top5
-##print "Running vgg16_cifar100_top5\n";
-##`./driver.pl ../vgg16_cifar100_top5/vgg16_layers.txt ../vgg16_cifar100_top5/vgg16_tensors.txt ../vgg16_cifar100_top5/vgg16_confs2.txt ../vgg16_cifar100_top5/vgg16_results2.csv $smart_dma $detailed_results`;
-##`./driver.pl ../vgg16_cifar100_top5/vgg16_layers.txt ../vgg16_cifar100_top5/vgg16_tensors.txt ../vgg16_cifar100_top5/vgg16_promise_confs2.txt ../vgg16_cifar100_top5/vgg16_promise_results2.csv $smart_dma $detailed_results`;
-#
-# Alexnet HA Loss2 without pDMA
-#print "Running Alexnet without pDMA\n";
-#$smart_dma = 0;
-#$detailed_results = 1;
-#`./driver.pl ../alexnet_cifar10/alexnet_layers.txt ../alexnet_cifar10/alexnet_tensors.txt ../alexnet_cifar10/alexnet_confs2.txt ../alexnet_cifar10/alexnet_results2_nodma.csv $smart_dma $detailed_results`;
-
diff --git a/llvm/projects/soc_simulator/src/table_generator.py b/llvm/projects/soc_simulator/src/table_generator.py
deleted file mode 100644
index 528b8e0ef5677cec9ccdba37abfde696544029cc..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/src/table_generator.py
+++ /dev/null
@@ -1,308 +0,0 @@
-import glob
-import os 
-import subprocess
-import shutil 
-import sys
-
-from collections import defaultdict
-
-'''
-FORMAT
-
-** LayerName NumOpsInLayer <cols>
-OpName Col1Val Col2Val ...
-
-** Conv1 1 h2f_time h2f_energy fp32_time fp32_energy f2h_time f2h_energy fp16_perf_time fp16_perf_energy fp16_time fp16_energy
-Conv1 51.8808 97.2844 319.582 601.966 12.81 18.758 388.092 650.649 340.037 590.664
-
-'''
-
-class TableGenerator: 
-
-    __ops_header_delimiter = "#"
-    __table_header_delimiter = "**"
-    __time_col_name = "time" 
-    __energy_col_name = "energy"
-
-    '''
-    Stores all precision conversions used. 
-    '''
-    precision_conversions = frozenset(["h2f", "f2h"]) 
-
-    def __init__(self, network_name, dir_path, soc_ops_file, iters, profiler_binary_name):
-        '''
-        Args:
-            dir_path:               Path of directory containing network binaries
-            iters:                  Number of iterations to run each binary for
-            profiler_binary_name:   Name of offline profiler binary to run 
-        '''
-        self.__network_name = network_name
-        self.__dir_path = dir_path
-
-        # Name of the actual directory 
-        self.__soc_ops_filename = soc_ops_file
-        self.__iters = iters 
-        self.__profiler_binary_name = profiler_binary_name
-
-        # Path to results directory 
-        self.__results_dir_path = "%s_results" % self.__network_name
-
-        # Outputted table file
-        self.__table_filename = "%s_tensors.txt" % self.__network_name
-
-        # Nested default dictionary of default dicts
-        self.__table = self.__build_nested_default_dict()
-
-
-    def generate_table(self):
-        '''
-        Generates a table file called <network_name>_tensors.txt in the
-        following steps:
-        1. Runs the offline profiler against the input binaries to generate
-        results files
-        2. Builds an internal table storing all data parsed from the results
-        files the offline profiler generated
-        3. Writes the internal table to the <network_name>_tensors.txt file,
-        using the <network_name>_ops.txt file to determine row order
-        '''
-        #self.__run_inputted_binaries()
-        self.__build_internal_table()
-        self.__output_table_to_file()
-
-
-    def __run_inputted_binaries(self):
-        '''
-        Invokes the profiler to run all appropriate binaries (they must start
-        with the network name) in the input directory. Result files generated
-        by the profiler are stored in the results directory and are named
-        <binary_name>.txt. These results files are parsed in a later step to
-        generate the table.
-        '''
-        if not os.path.isdir(self.__dir_path):
-            print("ERROR: Directory %s not found" % self.__dir_path)
-            exit(1)
-
-        try:
-            os.mkdir(self.__results_dir_path)
-        except OSError:
-            if os.path.isdir(self.__results_dir_path):
-                print("Directory already exists. Clearing directory.")
-                for old_file in glob.glob(os.path.join(self.__results_dir_path, "*")):
-                    os.remove(old_file)
-            else:
-                print("ERROR: Directory doesn't exist but failed to create dir")
-
-        for binary_name in os.listdir(self.__dir_path):
-            binary_path = os.path.join(self.__dir_path, binary_name)
-
-            if not self.__should_execute_file(binary_path):
-                continue
-            
-            output_file = os.path.join(self.__results_dir_path, binary_name + ".txt")
-            # No stdout/stderr piping needed for now
-            subprocess.Popen([self.__profiler_binary_name, binary_path, str(self.__iters), \
-                        output_file]).communicate()
-
-
-    def __build_internal_table(self):
-        '''
-        Iterates through each results file generated by the runs of the offline
-        profiler and stores the data in a dictionary in the following format:
-            [operation name][approximation type OR conversion type][time/energy]
-        '''
-        for results_file_name in os.listdir(self.__results_dir_path):
-            # Ignore if it's not a results file
-            if results_file_name == self.__table_filename or \
-                        not results_file_name.startswith(self.__network_name):
-                continue
-
-            approx_type = self.__get_approximation_type(results_file_name)
-            results_file = open(os.path.join(self.__results_dir_path, results_file_name), "r")
-            for line in results_file:
-                line = line.strip()
-                op_name, total_time, total_energy = self.__parse_tensor_operation_line(line)
-                # If the current operation is f2h or h2f  
-                if any(op_name.endswith(prec_conv) for prec_conv in TableGenerator.precision_conversions):
-                    # Get the original operation name (without the f2h/h2f) and the conversion type 
-                    orig_op_name, conversion_type = self.__get_original_operation_name(op_name)
-
-                    if orig_op_name not in self.__table:
-                        print("ERROR: Conversion found but original %s is not in the table" % orig_op_name)
-                        exit(1)
-
-                    # Store f2h and h2f as columns in the row belonging to the original operation
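-                    # Strip the leading "fp32_"/"fp16_" prefix (5 characters)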
-                    approx_type_no_fp_prefix = approx_type[5 : ]
-                    self.__table[orig_op_name][conversion_type + "_" + approx_type_no_fp_prefix][TableGenerator.__time_col_name] = total_time
-                    self.__table[orig_op_name][conversion_type + "_" + approx_type_no_fp_prefix][TableGenerator.__energy_col_name] = total_energy
-
-                # Create a new row in the dictionary
-                else:
-                    self.__table[op_name][approx_type][TableGenerator.__time_col_name] = total_time
-                    self.__table[op_name][approx_type][TableGenerator.__energy_col_name] = total_energy
-            results_file.close()
-
-
-    def __output_table_to_file(self):
-        '''
-        Outputs the internally stored table to a file using the <network_name>_ops.txt file as
-        a guideline in the following steps:
-        1. Opens the ops file and the file to output the table to
-        2. Reads a line from the ops file (guaranteed to be the layers/NML header)
-        3. For each operation in the layer (or 1 operation if the "layer" is a NML), we store the
-        time and the energy
-        '''
-        table_file_path = os.path.join(self.__results_dir_path, self.__table_filename)
-        soc_operations_file = open(self.__soc_ops_filename, "r")
-        table_file = open(table_file_path, "w")
-
-        curr_line = soc_operations_file.readline().strip()
-
-        while curr_line:
-            # First line is always the layers line (#layer_name,num_ops)
-            layer_name, num_ops = self.__parse_layer_info_line(curr_line)
-
-            # List of strings, where each string is a row corresponding to an operation
-            # in the layer
-            ops_in_layer = []
-
-            # Stores a list of elements in the header, which will be joined into a string
-            # The header is only generated for the first operation in the layer
-            # CRITICAL ASSUMPTION: All operations within a layer have the same # columns
-            # or everything breaks because the header is per layer, not per operation
-            header = [TableGenerator.__table_header_delimiter, layer_name, str(num_ops)]
-
-            # Iterate through all operations within the layer 
-            for op_in_layer_count in range(num_ops):
-                # Contains the operation name 
-                curr_line = soc_operations_file.readline().strip()
-
-                # Stores a list of elements that will be joined to make up a row 
-                curr_op = [curr_line]
-                operation_data = self.__table[curr_line]
-                # Iterate through time/energy data for each approximation type corresponding
-                # to the current operation
-                for approx_type in operation_data:
-                    op_time = operation_data[approx_type][TableGenerator.__time_col_name]
-                    op_energy = operation_data[approx_type][TableGenerator.__energy_col_name]
-                    curr_op.append(op_time)
-                    curr_op.append(op_energy)
-                    if op_in_layer_count == 0:
-                        if approx_type == "fp32_perf20":
-                            header.append("fp32_time")
-                            header.append("fp32_energy")
-                        elif approx_type == "fp16_perf20":
-                            header.append("fp16_time")
-                            header.append("fp16_energy")
-                        elif approx_type.find("f2h_perf20") != -1:
-                            header.append("f2h_time")
-                            header.append("f2h_energy")
-                        else:
-                            header.append("%s_time" % approx_type)
-                            header.append("%s_energy" % approx_type)
-                ops_in_layer.append(' '.join(curr_op))
-            # Collect all operation rows first and write them in a single call, because
-            # repeated calls to write() are slow (a memory-for-time tradeoff)
-            table_file.write("%s\n%s\n" % (' '.join(header), '\n'.join(ops_in_layer)))
-
-            curr_line = soc_operations_file.readline().strip()
-
-
-    def __should_execute_file(self, file_path):
-        '''
-        Checks if the file at the given file path is a binary that should be run
-        by the profiler. It must exist, be executable, and contain the network
-        name, as per our naming standards.
-
-        Args:
-            file_path:          Path of the file to check 
-        '''
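-        # Illustrative check (hypothetical paths): with network_name "vgg16_cifar10",
-        # an executable "./vgg16_cifar10_fp16" passes, while "./lenet_fp16" (wrong
-        # network) or a non-executable results file is rejected.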
-        return os.path.isfile(file_path) and os.access(file_path, os.X_OK) and \
-                file_path.find(self.__network_name) != -1
-
-
-    def __get_approximation_type(self, results_filename):
-        '''
-        Parses a given results filename for the approximation type.
-        Format assumption: <network_name>_<approx_type>.txt, where the network
-        name itself contains one underscore (e.g. vgg16_cifar10)
-            
-        Args:
-            results_filename:      Name of results file
-
-        Returns:
-            the approximation technique (ex: fp16) 
-        '''
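-        # Example, assuming network name "vgg16_cifar10":
-        #   "vgg16_cifar10_fp16.txt" -> "fp16"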
-        approx_type_start_ind = results_filename.find("_", results_filename.find("_") + 1) + 1 
-        approx_type_end_ind = results_filename.find(".txt")
-        return results_filename[approx_type_start_ind : approx_type_end_ind] 
-   
-
-    def __parse_tensor_operation_line(self, tensor_op_line):
-        '''
-        Parses a tensor operation line (within an output file from the offline
-        profiler) for the operation name, the total time used, and the total
-        energy used
-
-        Args:
-            tensor_op_line:        Tensor operation line from output file
-
-        Returns:
-            operation name
-            total time used
-            total energy used
-        '''
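-        # Example (fields are returned as strings, not floats):
-        #   "Conv1,310.228,694.841" -> ("Conv1", "310.228", "694.841")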
-        line_as_list = tensor_op_line.split(",")
-        return line_as_list[0], line_as_list[1], line_as_list[2] 
-
-
-    def __build_nested_default_dict(self):
-        '''
-        Builds a nested default dictionary with an arbitrary number of levels
-        '''
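-        # Example: d = self.__build_nested_default_dict()
-        #          d["Conv1"]["fp16"]["time"] = 310.228  # intermediate levels appear on demand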
-        return defaultdict(self.__build_nested_default_dict)
-
-    def __get_original_operation_name(self, op_name):
-        '''
-        Parses an operation name containing _<conversion type> into the original
-        operation name and the conversion type.
-        Format assumption: <original_op_name>_<conversion type>
-
-        Args:
-            op_name:        Name of the operation
-        
-        Returns:
-            the original operation name and the conversion type
-        '''
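-        # Example: "Conv1_f2h" -> ("Conv1", "f2h")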
-        underscore_ind = op_name.find("_")
-        return op_name[ : underscore_ind], op_name[underscore_ind + 1 : ]
-
-
-    def __parse_layer_info_line(self, layer_info_line): #layer_name,num_ops
-        '''
-        Parses a layer header (from the original ops.txt file) into the layer name
-        and the number of operations
-        Assumed format: #layer_name,num_ops
-
-        Args:
-            layer_info_line:    Line at the beginning of each layer in the ops file
-
-        Returns:
-            layer name
-            number of ops in the layer
-        '''
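-        # Example: "#Conv1,3" -> ("Conv1", 3)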
-        comma_ind = layer_info_line.find(",")
-        return layer_info_line[layer_info_line.find(TableGenerator.__ops_header_delimiter) + 1 : comma_ind], \
-                    int(layer_info_line[comma_ind + 1 : ])
-
-
-if __name__ == "__main__":
-    if len(sys.argv) != 6:
-        print("python table_generator.py <network name> <binary dir path> <soc_ops file> <num itrs> <profiler bin path>")
-        print("soc ops file: ~/soc_simular/%s_cifar10/%s_ops.txt")
-        exit(1)
-
-    network_name = sys.argv[1]
-    binary_dir_path = sys.argv[2]
-    soc_ops_file = sys.argv[3]
-    num_iters = int(sys.argv[4]) 
-    profiler_binary_name = sys.argv[5]
-    table_gen = TableGenerator(network_name, binary_dir_path, soc_ops_file, num_iters, profiler_binary_name)
-    table_gen.generate_table()
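-    # Example invocation (paths hypothetical):
-    #   python table_generator.py vgg16_cifar10 ./binaries \
-    #       ~/soc_simulator/vgg16_cifar10/vgg16_ops.txt 10 ./pp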
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs1.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs1.txt
deleted file mode 100644
index 69a3b7cc2fbbad37d571bf1009d4efbd982ce0f7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs1.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,9 9 9,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,8 8 8 8,8 8 8,9 9 9,7,7,8 8 8,7,7,7,8 8 8 8,9 9 9,9 9
-9 9 9,7,7,7,9 9 9,8 8 8,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,7,8 8 8,8 8 8,7,7,9 9 9,7,7,7,9 9 9 9,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,7,9 9 9,8 8 8,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,8 8 8 8,8 8 8,9 9 9,7,7,8 8 8,7,7,7,8 8 8 8,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,9 9 9,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,7,8 8 8,8 8 8,7,7,9 9 9,7,7,7,9 9 9 9,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,9 9 9,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,7,9 9 9,8 8 8,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,9 9 9 9,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8 8,7,9 9 9,9 9
-9 9 9,7,7,8 8 8 8,8 8 8,9 9 9,7,7,8 8 8,7,7,7,8 8 8 8,9 9 9,9 9
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs2.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs2.txt
deleted file mode 100644
index 73bef415abe86b2cfb918aa56ab7a2ba13b9021d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_confs2.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,7,8 8 8,4,7,7,4,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,5,8 8 8,8 8 8 8,7,6,7,7,8 8 8,4,4,4,8 8 8 8,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,8 8 8,4,7,8 8 8,4,4,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,5,4,7,7,7,4,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,7,7,7,6,7,7,8 8 8,4,4,4,7,8 8 8,8 8
-9 9 9,8 8 8 8,5,7,4,6,8 8 8 8,7,4,7,4,4,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,8 8 8,4,4,7,7,8 8 8 8,8 8 8,5,7,8 8 8,8 8
-9 9 9,7,8 8 8,8 8 8 8,7,8 8 8,4,7,7,4,4,8 8 8,6,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,4,7,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,6,4,7,8 8 8,4,7,4,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,4,7,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,5,4,7,7,7,4,8 8 8,7,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,8 8 8,4,7,8 8 8,4,4,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,8 8 8,4,4,7,7,8 8 8 8,8 8 8,5,7,8 8 8,8 8
-9 9 9,5,8 8 8,8 8 8 8,7,6,7,7,8 8 8,4,4,4,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,5,7,4,6,8 8 8 8,7,4,7,4,4,7,8 8 8,8 8
-9 9 9,7,8 8 8,8 8 8 8,7,8 8 8,4,7,7,4,4,8 8 8,6,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,6,4,7,8 8 8,4,7,4,8 8 8 8,8 8 8,8 8
-9 9 9,5,5,8 8 8 8,4,6,4,7,8 8 8,4,4,4,7,8 8 8,8 8
-9 9 9,8 8 8 8,7,7,7,6,7,7,8 8 8,4,4,4,7,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,8 8 8,8 8 8 8,7,7,4,4,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,8 8 8,4,4,7,7,8 8 8 8,8 8 8,5,7,8 8 8,8 8
-9 9 9,8 8 8 8,7,7,7,6,7,7,8 8 8,4,4,4,7,8 8 8,8 8
-9 9 9,8 8 8 8,5,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,5,8 8 8,8 8 8 8,7,6,7,7,8 8 8,4,4,4,8 8 8 8,8 8 8,8 8
-9 9 9,7,8 8 8,8 8 8 8,7,8 8 8,4,7,7,4,4,8 8 8,6,8 8 8,8 8
-9 9 9,8 8 8 8,5,7,4,6,8 8 8 8,7,4,7,4,4,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,4,7,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,4,8 8 8 8,7,5,4,7,7,7,4,8 8 8,7,8 8 8,8 8
-9 9 9,7,4,8 8 8 8,7,6,4,7,8 8 8,4,7,4,8 8 8 8,8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp16.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp16.csv
deleted file mode 100644
index 04c4cfc4efb2b0fe6f94ddc332d356ba2966da72..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp16.csv
+++ /dev/null
@@ -1,148 +0,0 @@
-Add1,69.7114,211.862,107.8,104.062,6085.83,3099.48,2986.34,49.2963,152.515,80.4532,73.6474,4385.91,2317.11,2113.98
-Add10,41.8232,272.933,216.003,56.9294,13052.8,10330.2,2722.54,29.5763,193.08,152.806,40.2755,9233.85,7307.84,1926.04
-Add10_f2h,40.0563,257.396,205.254,52.142,12850.3,10247,2603.29,28.3409,182.148,145.252,36.8963,9087.18,7246.3,1840.92
-Add10_h2f,10.5152,61.7303,48.6618,13.0686,11780.8,9286.75,2494.08,7.45143,43.7235,34.4675,9.2562,8348.86,6581.42,1767.47
-Add11,32.499,205.175,160.15,45.0253,12627.7,9856.71,2770.94,22.9909,145.16,113.305,31.856,8930.63,6971.08,1959.62
-Add11_f2h,4.84751,25.3885,19.8298,5.55869,10456,8166.49,2289.46,3.43461,18.1215,14.1539,3.96771,7436.9,5808.46,1628.52
-Add11_h2f,2.71712,12.3719,9.65008,2.72179,9106.03,7102.66,2003.36,1.92229,8.92178,6.95893,1.96292,6563.12,5119.11,1444.06
-Add12,32.3458,201.907,158.806,43.1007,12485.6,9820.54,2665.04,22.8734,142.79,112.309,30.483,8830.29,6945.65,1884.77
-Add12_f2h,4.88662,25.364,19.9643,5.39974,10378.2,8169.14,2209.1,3.45883,18.0489,14.2063,3.84293,7376.52,5806.53,1570.11
-Add12_h2f,2.69777,11.7633,9.24658,2.51676,8720.78,6854.97,1865.81,1.90775,8.49592,6.67815,1.81791,6297.92,4950.42,1347.6
-Add13,32.2976,199.983,158.214,41.7697,12384.7,9798.1,2586.56,22.8397,141.436,111.896,29.5422,8758.82,6929.74,1829.23
-Add13_f2h,4.91853,25.4026,20.1163,5.28634,10314.5,8167.97,2146.57,3.50657,18.2597,14.4609,3.79914,7330.34,5804.95,1525.56
-Add13_h2f,2.71496,12.0093,9.49625,2.51304,8840.31,6990.41,1849.9,1.92046,8.68342,6.8666,1.81698,6383.37,5047.78,1335.71
-Add14,0.999932,9.47008,7.42388,2.0462,18954.6,14861.4,4093.23,0.708467,7.11012,5.57305,1.53723,14214.1,11144.8,3069.6
-Add14_f2h,1.88605,7.19323,5.64536,1.54786,7493.19,5878.86,1614.33,1.34509,5.54955,4.35727,1.1924,5686.64,4462.19,1224.59
-Add14_h2f,0.760782,0.960605,0.753025,0.20758,2523.02,1977.84,545.18,0.538123,1.10915,0.869399,0.23978,2911.43,2282.18,629.323
-Add15,0.354443,1.81062,1.41722,0.393395,10528.4,8243.7,2284.66,0.255481,2.29175,1.79496,0.496829,13969.3,10946.3,3023.19
-Add15_f2h,0.764484,1.36953,1.07351,0.296021,3102.43,2430.16,672.268,0.572894,1.59231,1.24879,0.343541,3452.8,2704.95,747.932
-Add15_h2f,0.120211,0.0438951,0.0343584,0.00953666,722.085,565.205,156.881,0.0858258,0.0897805,0.0702545,0.0195276,1484.29,1161.5,322.825
-Add1_f2h,314.261,894.668,472.564,422.104,5684.96,2999.37,2685.59,222.29,654.332,365.302,299.156,4151.15,2314.09,1902.21
-Add1_h2f,83.2799,265.625,135.314,130.311,6379.44,3249.94,3129.5,58.8888,190.41,99.6739,92.2071,4573.2,2394.11,2214.4
-Add2,69.1905,303.338,199.548,103.79,8768.52,5768.33,3000.19,48.9274,214.509,141.116,73.3959,6200.7,4079.22,2121.55
-Add2_f2h,336.556,1525.29,1077.85,447.443,9065.06,6405.97,2659.09,238.064,1078.88,762.399,316.488,6410.37,4530.13,1880.3
-Add2_h2f,83.1178,365.325,234.29,131.036,8790.52,5637.51,3153.01,58.7743,258.343,165.686,92.6601,6216.16,3986.69,2229.56
-Add3,40.9358,206.435,132.407,74.0277,10086.1,6469.28,3616.86,28.9503,146.001,93.6465,52.3555,7132.63,4575.01,2557.68
-Add3_f2h,52.9835,255.846,166.622,89.2241,9657.91,6289.83,3368.08,37.4671,180.924,117.829,63.0955,6829.52,4447.85,2381.69
-Add3_h2f,41.6412,204.32,129.501,74.8188,9814.92,6220.84,3594.07,29.4469,144.508,91.593,52.9164,6942.13,4400.1,2542.08
-Add4,37.6205,200.601,138.934,61.6676,10664.8,7386.36,3278.47,26.6043,141.874,98.2614,43.6138,7542.19,5223.74,2318.49
-Add4_f2h,146.069,779.141,555.206,223.935,10669.7,7603.38,3066.3,103.384,551.386,392.897,158.49,7544.72,5376.51,2168.23
-Add4_h2f,41.3499,213.133,145.707,67.426,10308.5,7047.34,3261.17,29.2407,150.732,103.047,47.6846,7289.72,4983.6,2306.13
-Add5,39.1711,216.321,149.235,67.0856,11045,7619.75,3425.3,27.7004,152.99,105.545,47.445,7810.9,5388.61,2422.31
-Add5_f2h,27.4739,144.775,100.707,44.0675,10539.5,7331.45,3208.09,19.4282,102.386,71.2208,31.165,7453.51,5184.78,2268.74
-Add5_h2f,20.8433,109.029,74.8169,34.2117,10461.4,7178.79,3282.61,14.7445,77.147,52.9395,24.2078,7398.96,5077.34,2321.65
-Add6,38.2773,222.684,163.353,59.3305,11635.7,8535.58,3100.14,27.0679,157.484,115.525,41.9589,8228.62,6036.27,2192.36
-Add6_f2h,78.3804,453.985,337.891,116.094,11585.2,8622.75,2962.45,55.4654,321.237,239.086,82.1516,8192.13,6097.33,2094.81
-Add6_h2f,20.7551,113.98,83.0303,30.9501,10983.3,8000.93,2982.38,14.6773,80.6183,58.7275,21.891,7767.8,5658.58,2109.24
-Add7,38.3104,225.263,168.705,56.5575,11759.8,8807.22,2952.55,27.0935,159.323,119.322,40.0017,8316.1,6228.2,2087.92
-Add7_f2h,81.46,477.128,362.784,114.344,11714.6,8907.2,2807.39,57.624,337.519,256.632,80.8876,8283.65,6298.49,1985.17
-Add7_h2f,20.7616,115.225,85.6469,29.5778,11099.7,8250.44,2849.26,14.6822,81.4998,60.5791,20.9209,7850.09,5834.99,2015.11
-Add8,42.0846,253.744,188.878,64.8666,12058.8,8976.12,3082.68,29.7613,179.461,133.584,45.8767,8527.74,6347.77,2180
-Add8_f2h,14.6636,82.9186,61.9626,20.956,11309.1,8451,2858.12,10.3711,58.6733,43.8445,14.8289,8000.2,5978.33,2021.89
-Add8_h2f,10.5141,57.5583,42.7041,14.8542,10948.7,8123.2,2825.55,7.43528,40.7375,30.2242,10.5133,7748.37,5748.74,1999.65
-Add9,41.8259,265.047,205.792,59.2543,12674.6,9841.07,2833.5,29.5785,187.445,145.539,41.9062,8963.23,6959.46,2003.8
-Add9_f2h,39.8499,247.783,193.75,54.033,12435.2,9723.35,2711.83,28.1894,175.299,137.075,38.2246,8793.58,6875.93,1917.67
-Add9_h2f,10.4589,60.1816,46.5522,13.6294,11508.3,8902.04,2606.31,7.39612,42.594,32.9479,9.64626,8144.6,6300.12,1844.51
-Conv1,310.228,694.841,342.376,352.465,5137.93,2796.38,2341.55,227.742,502.069,283.327,253.153,4000.56,2481.3,1665.08
-Conv10,481.652,3018.22,2355.36,662.857,12534.5,9781.99,2752.5,340.625,2134.28,1665.53,468.767,8863.57,6917.32,1946.32
-Conv10_f2h,16.267,94.4725,71.7156,22.7569,11613.2,8815.27,2797.92,11.5068,66.8653,50.7629,16.1033,8215,6235.99,1979.12
-Conv10_h2f,10.5438,65.7062,52.8554,12.8508,12463.8,10026.1,2437.66,7.45594,46.5025,37.4079,9.09478,8820.88,7095.76,1725.14
-Conv11,173.774,1077.96,835.557,242.405,12410.5,9620.47,2790,122.957,762.523,591.018,171.513,8775.98,6803.22,1972.86
-Conv11_f2h,6.79928,37.3658,28.7179,8.64791,10989.1,8446.21,2542.86,4.81142,26.5033,20.3687,6.13521,7787.5,5985.57,1802.09
-Conv11_h2f,2.77964,12.8278,10.0359,2.79191,9229.29,7220.6,2008.69,1.96619,9.21135,7.20639,2.00505,6624.71,5182.79,1441.97
-Conv12,172.103,1054.21,824.356,229.857,12254.7,9583.51,2671.18,121.754,745.623,583.022,162.614,8665.94,6777.23,1888.86
-Conv12_f2h,6.35668,34.3,26.5952,7.70476,10790.7,8366.52,2424.14,4.49979,24.3436,18.8767,5.46737,7649.53,5931.31,1718.37
-Conv12_h2f,2.76929,12.2454,9.6556,2.58978,8837.54,6968.57,1868.97,1.95863,8.84718,6.97582,1.87148,6378.23,5029.25,1349.07
-Conv13,171.684,1041.08,819.652,221.43,12131.7,9552.2,2579.48,121.445,736.263,579.64,156.639,8579.07,6755.25,1824.02
-Conv13_f2h,6.20782,33.1375,25.8857,7.25183,10669.3,8333.91,2335.41,4.39486,23.5409,18.3911,5.15048,7566.21,5910.3,1656.12
-Conv13_h2f,2.76826,12.0417,9.55355,2.48819,8700.26,6902.56,1797.7,1.95823,8.68074,6.88708,1.79382,6270.3,4974.74,1295.67
-Conv1_f2h,7.62006,16.0868,8.21342,7.87343,4543.5,2445.6,2097.91,5.45871,12.1698,7.31434,5.59711,3581.38,2233.61,1494.73
-Conv1_h2f,83.4767,247.452,143.632,103.82,5928.39,3441,2487.39,59.0279,183.614,113.023,73.667,4398.72,2707.49,1764.94
-Conv2,1399.51,5412.29,3378.78,2033.51,7735.19,4829.01,2906.18,989.654,3830.09,2393.05,1438.02,5474.06,3420.35,2055.12
-Conv2_f2h,109.635,397.898,194.591,203.307,7252.79,3544.58,3708.21,77.5331,282.821,139.764,143.829,5150.57,2542.09,2622.66
-Conv2_h2f,83.3307,436.945,336.643,100.302,10486.9,8079.59,2407.3,58.9247,308.997,238.072,70.9279,7415.87,5713.65,1702.27
-Conv3,354.47,1585.56,948.24,637.318,8947.03,5350.96,3596.07,250.683,1121.23,670.544,450.697,6326.72,3783.93,2542.83
-Conv3_f2h,28.5399,124.01,71.14,52.8695,8689.56,4984.75,3704.81,20.1903,87.7504,50.3439,37.4082,6145.43,3525.53,2620.02
-Conv3_h2f,41.6322,203.92,135.55,68.3698,9796.09,6511.67,3284.41,29.4395,144.212,95.8623,48.3504,6927.44,4604.9,2322.58
-Conv4,543.421,2780.23,1853.46,926.763,10232.8,6821.89,3410.92,384.277,1965.96,1310.62,655.348,7235.83,4823.95,2411.91
-Conv4_f2h,52.8973,253.818,152.989,100.829,9599.81,5786.36,3813.45,37.4134,179.493,108.19,71.3037,6789.28,4092.37,2696.95
-Conv4_h2f,41.5238,236.022,174.197,61.8248,11368,8390.24,2977.78,29.3628,166.909,123.187,43.7213,8038.87,5933.13,2105.74
-Conv5,278.988,1427.88,945.3,482.585,10236.5,6776.91,3459.59,197.282,1009.7,668.449,341.252,7238.45,4792.15,2446.32
-Conv5_f2h,14.8008,70.9043,45.1842,25.7201,9581.37,6105.87,3475.5,10.4668,50.1662,31.9688,18.1977,6778.52,4319.78,2458.78
-Conv5_h2f,20.8418,110.25,77.5339,32.7165,10579.5,7440.09,3139.45,14.7375,78.0695,54.9032,23.1665,7491.32,5268.35,2222.99
-Conv6,490.314,2738.69,1950.74,787.949,11171.6,7957.5,3214.1,346.719,1936.58,1379.4,557.184,7899.63,5626.93,2272.73
-Conv6_f2h,28.103,144.6,96.262,48.3381,10291.3,6851.06,3440.28,19.8745,102.268,68.0815,34.1867,7277.97,4845.06,2432.93
-Conv6_h2f,20.8654,122.714,92.9633,29.7508,11762.4,8910.71,2851.66,14.7546,86.7919,65.7503,21.0417,8318.79,6302.01,2016.79
-Conv7,514.632,2922.64,2150.64,772,11358.7,8358.43,3000.25,363.913,2066.64,1520.74,545.901,8031.96,5910.47,2121.51
-Conv7_f2h,28.2053,152.501,107.622,44.8785,10813.7,7631.44,3182.25,19.9546,107.904,76.1494,31.7549,7647.43,5396.98,2250.48
-Conv7_h2f,20.8436,124.457,96.3593,28.0977,11942.1,9246.03,2696.05,14.7391,88.0253,68.1525,19.8729,8446.1,6539.32,1906.8
-Conv8,240.716,1378.49,1006.49,371.999,11454.2,8363.32,3090.86,170.247,974.885,711.794,263.094,8099.49,5913.93,2185.59
-Conv8_f2h,9.68469,49.5859,35.2796,14.3062,10241.5,7287.22,2954.3,6.87516,35.2358,25.0675,10.1686,7251.09,5159.52,2091.63
-Conv8_h2f,10.5597,59.0058,44.3452,14.6605,11175.6,8398.89,2776.67,7.46752,41.7631,31.3867,10.3764,7908.98,5943.95,1965.05
-Conv9,477.878,2885.71,2189.28,696.429,12080.4,9165.65,2914.72,338.05,2040.9,1548.28,492.646,8542.53,6481.6,2061.04
-Conv9_f2h,16.2347,90.1067,65.6407,24.466,11099.8,8085.77,3014.08,11.4848,63.772,46.458,17.3143,7851.77,5719.72,2132.09
-Conv9_h2f,10.5851,63.9065,50.4134,13.4931,12075.4,9525.82,2549.57,7.48625,45.2319,35.6818,9.55026,8545.5,6741.23,1804.29
-Mul1,5.49296,37.0659,29.09,7.97593,13530.7,10624,2906.69,3.93147,26.5587,20.8353,5.72401,9606.57,7543.24,2063.55
-Mul1_f2h,2.18449,8.22985,6.46583,1.76402,7507.4,5897.26,1610.14,1.54872,6.15957,4.84093,1.31877,5588.96,4391.09,1197.99
-Mul1_h2f,0.810781,1.37103,1.07631,0.294716,3393.71,2664.12,729.59,0.574493,1.51826,1.19191,0.326392,3760.1,2951.69,808.488
-Mul2,1.31517,8.1931,6.40985,1.78324,12447.5,9739.56,2707.96,0.931548,6.00801,4.69986,1.30826,9120.64,7136.62,1984.19
-Mul2_f2h,1.76803,6.49192,5.08483,1.40709,7151.61,5599.64,1551.97,1.26447,5.09282,3.99098,1.10192,5498.59,4306.31,1192.38
-Mul2_h2f,0.133188,0.0183427,0.0143504,0.0039923,261.068,204.28,56.7875,0.0949979,0.06404,0.0500845,0.013956,879.603,688.197,191.411
-Pool1,79.7262,360.302,206.802,153.501,9038.84,5188.08,3850.75,56.3789,254.792,146.244,108.551,6391.73,3668.8,2722.98
-Pool1_f2h,106.496,467.712,270.25,197.462,8783.74,5075.34,3708.4,75.3091,330.755,191.123,139.636,6211.29,3589.1,2622.27
-Pool1_h2f,20.9488,92.2916,53.56,38.7316,8810.98,5113.32,3697.66,14.8149,65.2847,37.8887,27.3971,6231.7,3616.65,2615.16
-Pool2,40.8451,210.857,135.193,75.6642,10325.2,6620.16,3705.01,28.8847,149.127,95.6146,53.5134,7301.99,4681.87,2620.15
-Pool2_f2h,52.6109,263.231,170.165,93.0664,10007,6469.01,3537.99,37.2057,186.157,120.341,65.8167,7076.35,4574.53,2501.84
-Pool2_h2f,10.5892,50.2118,32.2214,17.9904,9482.45,6084.99,3397.46,7.49375,35.5699,22.8258,12.7444,6710.71,4306.37,2404.39
-Pool3,21.7732,125.807,90.0357,35.7709,11556.7,8270.76,3285.91,15.3981,89.0019,63.696,25.3063,8175.07,5850.69,2324.42
-Pool3_f2h,27.105,148.004,106.551,41.4535,10921,7862.21,3058.75,19.1678,104.677,75.3585,29.3184,7723.33,5560.2,2163.15
-Pool3_h2f,5.37692,25.667,18.3482,7.31879,9547.33,6824.97,2722.36,3.80213,18.2207,13.0249,5.19588,6777.51,4844.84,1932.7
-Pool4,12.2268,81.0192,62.5832,18.436,13254.2,10238.3,3015.89,8.64895,57.3698,44.3151,13.055,9382.73,7247.8,2134.97
-Pool4_f2h,14.3183,85.3146,66.1354,19.1792,11917,9238.05,2678.99,10.1261,60.363,46.7931,13.5702,8430.6,6535.39,1895.24
-Pool4_h2f,2.76689,12.3171,9.50437,2.81277,8901.99,6869.12,2032.88,1.95715,8.87091,6.84512,2.02583,6408.16,4944.77,1463.42
-Pool5,4.24587,28.8535,22.6878,6.16569,13613.1,10704.3,2908.83,3.01182,20.5632,16.1689,4.39464,9690.26,7619.75,2070.69
-Pool5_f2h,4.30952,21.6535,17.0404,4.6131,10038.8,7899.74,2139.08,3.05175,15.4491,12.159,3.29042,7146.27,5623.84,1522.54
-Pool5_h2f,0.789328,1.12606,0.885458,0.240603,2854.59,2244.62,609.971,0.558354,1.28688,1.01189,0.27502,3258.62,2562.17,696.513
-Relu1,63.043,222.775,109.755,113.019,7068.58,3483,3585.58,44.5829,158.651,79.4284,79.9495,5034.27,2521.06,2536.23
-Relu10,7.8553,54.9037,42.8846,12.0191,13979.1,10918.9,3060.23,5.55636,38.9249,30.404,8.52113,9908.04,7739.05,2169.03
-Relu10_f2h,14.2674,85.1708,66.8096,18.3612,11939.8,9365.82,2573.95,10.0908,60.2697,47.2767,12.9932,8447.54,6626.47,1821.1
-Relu10_h2f,10.4698,61.6665,48.0288,13.6376,11779.2,9174.16,2605.03,7.40475,43.6532,33.9994,9.654,8336.17,6492.58,1843.64
-Relu11,2.09916,16.791,13.0701,3.72095,15998.3,12453,3545.28,1.48462,12.0634,9.39021,2.67327,11491.9,8945.38,2546.63
-Relu11_f2h,4.48398,23.4935,18.3062,5.18733,10483.3,8168.53,2314.74,3.17253,16.7097,13.0203,3.68949,7455.03,5808.96,1646.13
-Relu11_h2f,2.65339,11.6345,9.04902,2.58551,8767.81,6819.4,1948.41,1.87634,8.37685,6.51523,1.86173,6311.27,4908.75,1402.6
-Relu12,2.09976,17.1837,13.475,3.70873,16368.8,12836,3532.8,1.48516,12.3639,9.69591,2.66821,11777.3,9235.93,2541.54
-Relu12_f2h,4.44727,22.6715,17.7981,4.87339,10189.2,7998.76,2190.47,3.14699,16.1439,12.6738,3.47034,7246.5,5688.6,1558.02
-Relu12_h2f,2.64916,11.3108,8.86276,2.44801,8539.6,6691.38,1848.22,1.87333,8.17249,6.4039,1.76875,6170.04,4834.82,1335.34
-Relu13,2.10382,16.7344,13.1973,3.53714,15914.8,12551.1,3363.76,1.48869,12.0067,9.46927,2.53767,11417,9004.38,2412.85
-Relu13_f2h,4.36041,22.0096,17.3783,4.63133,10092.1,7968.08,2123.99,3.0874,15.6907,12.3898,3.30126,7183.38,5671.67,1511.87
-Relu13_h2f,2.72796,11.4339,9.01109,2.4228,8560.24,6746.45,1813.78,2.03488,8.23175,6.48774,1.74417,6172.8,4865.11,1307.81
-Relu14,1.14743,10.6248,8.32102,2.30382,18530.7,14512.9,4017.87,0.811516,7.89588,6.18393,1.71212,13778.4,10791.3,2987.38
-Relu14_f2h,1.5964,5.26066,4.12403,1.13663,6435.18,5042.95,1392.23,1.14442,4.24048,3.32542,0.91515,5108.03,4003.08,1105.06
-Relu14_h2f,0.762322,0.9319,0.729587,0.202312,2419.48,1894.24,525.243,0.539977,1.14939,0.899736,0.249678,2965.98,2321.79,644.251
-Relu1_f2h,116.511,379.888,188.365,191.522,6536.78,3247.4,3289.38,82.4051,270.74,136.938,135.452,4668.45,2369.38,2327.06
-Relu1_h2f,83.4086,300.518,148.77,151.748,7206.56,3567.67,3638.89,58.9804,213.679,107.062,107.336,5124.37,2567.66,2573.96
-Relu2,62.4233,282.215,169.665,112.55,9042.07,5436.06,3606.01,44.1442,199.587,119.994,79.596,6394.13,3844.28,2549.94
-Relu2_f2h,106.67,463.637,286.355,177.281,8692.96,5369.01,3323.95,75.4328,327.879,202.516,125.367,6147.13,3796.78,2350.42
-Relu2_h2f,83.0734,372.529,221.009,151.52,8968.66,5320.81,3647.85,58.7425,263.431,156.291,107.144,6342.03,3762.66,2579.46
-Relu3,31.1491,157.975,97.2597,60.7158,10143.1,6244.72,3898.34,22.0274,111.732,68.79,42.9423,7173.3,4416.4,2756.93
-Relu3_f2h,53.9219,262.044,163.271,98.7728,9719.42,6055.82,3663.6,38.1317,185.316,115.466,69.8509,6872.96,4282.31,2590.67
-Relu3_h2f,41.4745,201.753,123.336,78.4172,9731.62,5949.14,3782.48,29.3302,142.684,87.2272,55.4573,6883.19,4207.91,2675.32
-Relu4,31.1126,165.225,109.288,55.9374,10620.9,7025.18,3595.76,22.0035,116.872,77.306,39.5668,7511.45,4968.48,2543
-Relu4_f2h,52.5946,268.222,180.055,88.1672,10199.7,6846.98,3352.7,37.1932,189.687,127.336,62.3518,7212.66,4841.86,2370.81
-Relu4_h2f,41.3761,210.186,137.878,72.3072,10159.6,6664.53,3495.09,29.259,148.644,97.5098,51.1349,7184.42,4712.91,2471.53
-Relu5,15.6197,87.4008,58.9328,28.468,11190.8,7545.75,3645.04,11.048,61.8499,41.7045,20.1457,7916.78,5338.18,2578.64
-Relu5_f2h,27.1876,142.132,96.6157,45.5163,10455.9,7107.57,3348.35,19.2268,100.527,68.3345,32.1932,7394.61,5026.63,2368
-Relu5_h2f,20.7564,107.244,71.9782,35.2657,10333.7,6935.58,3398.08,14.6781,75.8517,50.9089,24.9429,7308.38,4905.11,2403.28
-Relu6,15.5774,91.8805,65.7777,26.1027,11796,8444.84,3351.21,11.0158,65.0123,46.543,18.4696,8345.51,5974.6,2370.94
-Relu6_f2h,26.9604,148.182,106.898,41.2837,10992.7,7930.13,3062.55,19.0651,104.8,75.6027,29.1976,7774.08,5608.24,2165.85
-Relu6_h2f,20.7925,111.946,79.7133,32.2323,10779.9,7676.09,3103.81,14.709,79.2691,56.4457,22.8237,7636.46,5437.77,2198.71
-Relu7,15.5863,93.2475,68.0626,25.1848,11966.2,8734.34,3231.84,11.0234,65.9937,48.1698,17.8242,8467.81,6180.86,2286.99
-Relu7_f2h,27.0202,149.73,110.21,39.5201,11082.9,8157.67,2925.21,19.1073,105.894,77.944,27.9502,7837.75,5769.07,2068.69
-Relu7_h2f,20.7296,113.484,82.3887,31.0951,10948.8,7948.82,3000.02,14.6587,80.2669,58.2735,21.9936,7743.7,5621.92,2121.8
-Relu8,7.88397,50.8594,37.3843,13.4751,12900,9482.44,3417.54,5.60997,36.5052,26.8322,9.67313,9199.71,6762.66,2437.09
-Relu8_f2h,14.4739,80.2487,59.2479,21.0008,11105.4,8199.18,2906.22,10.2403,56.8303,41.9579,14.8727,7866.72,5808.05,2058.7
-Relu8_h2f,10.4677,57.0219,41.8152,15.2067,10895,7989.54,2905.49,7.40341,40.3631,29.5989,10.7642,7710.49,5654.25,2056.24
-Relu9,7.84042,52.587,40.301,12.286,13413.6,10279.8,3133.81,5.54547,37.2786,28.5693,8.70947,9505.87,7285.1,2220.81
-Relu9_f2h,14.2726,83.3662,64.1642,19.202,11681.8,8991.1,2690.7,10.0949,58.9936,45.4054,13.5884,8264.16,6360.65,1903.53
-Relu9_h2f,10.4563,59.3499,45.3517,13.9982,11352.1,8674.64,2677.47,7.39481,42.0028,32.0961,9.90692,8032.96,6138.36,1894.63
-Softmax1,2.01231,8.10635,6.34144,1.76491,8058.61,6304.39,1754.22,1.42328,5.95817,4.66119,1.29711,5923.64,4634.63,1289.13
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp32.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp32.csv
deleted file mode 100644
index 2e203cf73d4f5220f9f3217398c952496028fb62..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_fp32.csv
+++ /dev/null
@@ -1,50 +0,0 @@
-Add1,113.913,376.561,219.636,156.925,6609.15,3854.17,2754.97,80.5497,282.75,177.622,111.325,4961.17,3115.93,1954.23
-Add10,46.7852,353.064,295.37,57.6943,15092.7,12626.5,2466.15,33.0858,249.698,208.891,40.8191,10672.6,8928.67,1744.47
-Add11,32.7593,228.974,188.206,40.768,13979,11490.1,2488.93,23.1655,161.932,133.101,28.8363,9885.52,8125.42,1760.39
-Add12,32.7648,220.92,180.753,40.1662,13485,11033.3,2451.71,23.1723,156.257,127.848,28.4139,9536.19,7802.45,1733.98
-Add13,32.723,215.055,175.489,39.5662,13144.2,10726,2418.24,23.1396,152.085,124.105,27.9836,9295.27,7585.25,1710.24
-Add14,0.902452,1.91287,1.55297,0.359903,4206.37,3415.11,791.258,0.639486,1.96075,1.59154,0.36925,4301.37,3491.69,809.769
-Add15,0.226743,0.146973,0.119291,0.0276818,1268.32,1029.69,238.626,0.161374,0.274735,0.222974,0.0517663,2347.78,1906.32,441.514
-Add2,114.041,707.602,550.188,157.413,12409.4,9648.8,2760.62,80.6401,500.915,389.6,111.35,8784.41,6832.28,1952.72
-Add3,51.2818,306.368,231.407,74.9613,11948.4,9024.9,2923.5,36.2641,216.686,163.674,53.0143,8450.22,6382.88,2067.43
-Add4,51.329,344.524,275.839,68.6848,13425,10748.6,2676.4,36.2967,243.665,195.087,48.5789,9494.83,7601.95,1892.93
-Add5,48.2779,312.538,246.161,66.377,12947.4,10197.6,2749.79,34.1397,221.022,174.082,46.9412,9155.71,7211.24,1944.51
-Add6,48.3038,331.31,269.638,61.6728,13717.7,11164.2,2553.51,34.1584,234.299,190.684,43.6161,9700.3,7894.61,1805.74
-Add7,48.1751,337.048,277.011,60.0367,13992.6,11500.2,2492.4,34.0674,238.357,195.898,42.4598,9894.64,8132.16,1762.55
-Add8,46.6813,323.76,263.597,60.1628,13871.3,11293.7,2577.64,33.0123,228.964,186.417,42.5484,9808.96,7986.22,1822.79
-Add9,46.6775,338.552,280.794,57.7577,14509.8,12034.3,2475.46,33.0125,239.488,198.63,40.8649,10264.6,8513.31,1751.56
-Conv1,163.415,464.69,270.83,193.861,5719.79,3344.04,2375.74,115.618,364.219,239.324,137.798,4496.91,2961.18,1689.91
-Conv10,802.661,5895.34,4887.39,1007.95,14689.8,12178.3,2511.49,567.577,4168.67,3455.92,712.856,10387.4,8611.5,1776.16
-Conv11,308.859,2238.17,1841.36,396.805,14494.3,11924.8,2569.46,218.426,1582.75,1302.12,280.685,10249.2,8432.33,1817.28
-Conv12,308.957,2143.29,1754.35,388.942,13875.6,11357.9,2517.72,218.489,1515.61,1240.55,275.097,9811.8,8031.51,1780.54
-Conv13,308.526,2076.4,1693.52,382.878,13461.8,10980,2481.86,218.187,1468.29,1197.52,270.808,9519.36,7764.54,1755.1
-Conv2,1186.37,5295.79,3610.95,1684.84,9029.61,6180.44,2849.17,840.943,3760.72,2581.03,1191.77,6467.54,4470.42,2016.67
-Conv3,613.82,3267.16,2333.45,933.711,10647.5,7604.92,3042.61,434.112,2311.16,1650.96,660.338,7532.26,5381.04,2151.65
-Conv4,982.846,5808.36,4413.44,1394.92,11819.9,8981.38,2838.55,695.005,4107.39,3121,986.415,8358.48,6351.33,2007.22
-Conv5,491.91,3047.42,2348.81,698.608,12391.2,9550.74,2840.5,347.871,2154.95,1660.93,494.035,8762.18,6753.64,2008.58
-Conv6,858.952,5554.16,4403.19,1150.97,12932.7,10252.8,2679.96,607.382,3927.42,3113.55,813.89,9144.98,7249.96,1895.06
-Conv7,859.425,5760.38,4659.21,1101.17,13405.5,10842.9,2562.58,607.715,4073.23,3294.58,778.676,9479.18,7667.16,1812.06
-Conv8,434.046,2940.43,2368.34,572.081,13549.6,10913.5,2636.05,306.938,2079.26,1674.71,404.559,9581.1,7717.15,1864.02
-Conv9,801.893,5651.89,4624.92,1026.97,14096.7,11535.3,2561.33,567.034,3996.52,3270.33,726.23,9967.96,8156.84,1811.22
-Mul1,10.6715,65.8927,53.5391,12.3536,12394.8,10071.9,2322.88,7.57836,46.691,37.9335,8.75846,8780.28,7134.75,1645.72
-Mul2,0.821147,1.66598,1.35236,0.313621,4076.84,3309.78,767.066,0.582562,1.7168,1.39365,0.323189,4224.31,3430.04,794.372
-Pool1,175.393,1012.62,726.774,285.844,11549.1,8289.34,3259.78,124.05,716.6,514.49,202.181,8172.74,5868.18,2305.37
-Pool2,88.7044,575.997,444.625,131.372,12987.9,10025.6,2962.21,62.7347,407.367,314.456,92.9131,9184.52,7089.77,2094.78
-Pool3,45.2833,307.898,247.595,60.3023,13598.6,10935.2,2663.36,32.0292,217.79,175.137,42.6548,9616.06,7732.71,1883.4
-Pool4,24.6055,179.513,148.419,31.0941,14591.4,12063.9,2527.45,17.4004,126.964,104.972,21.9976,10319,8531.59,1787.91
-Pool5,8.20681,50.2013,40.8424,9.35894,12226.9,9947.17,2279.71,5.82151,35.6921,29.0395,6.65352,8656.71,7042.81,1614.13
-Relu1,75.1381,267.733,149.248,118.484,7127.84,3973.96,3153.88,53.1331,197.389,117.092,83.9442,5255.76,3118.27,2234.45
-Relu10,9.31493,65.6928,54.5384,11.1544,14104.6,11709.8,2394.86,6.58686,46.4969,38.6024,7.89659,9982.66,8287.84,1695.28
-Relu11,2.49349,12.8041,10.5081,2.29598,10267.6,8426.39,1841.24,1.76354,9.25156,7.5928,1.65903,7415.31,6085.61,1329.91
-Relu12,2.50068,12.3579,10.0966,2.26132,9872.52,8066.18,1806.35,1.77005,8.95586,7.31638,1.63973,7136.08,5830.14,1306.13
-Relu13,2.48762,12.2284,9.96443,2.26399,9833.11,8012.58,1820.53,1.75929,8.83304,7.19779,1.63547,7102.38,5787.52,1315.04
-Relu14,0.846016,1.57242,1.27739,0.29503,3703.4,3008.56,694.844,0.599154,1.69071,1.37363,0.317122,3975.03,3229.61,745.515
-Relu2,74.295,455.493,338.819,116.674,12261.1,9120.32,3140.75,52.5363,322.394,239.897,82.5246,8677.47,6456.88,2221.33
-Relu3,36.6237,218.815,161.907,56.9082,11949.1,8841.42,3107.67,25.8986,154.77,114.524,40.248,8450.92,6253.36,2197.68
-Relu4,36.7139,244.573,191.979,52.5941,13329.1,10462.8,2866.31,25.9761,173.026,135.817,37.2093,9428.22,7400.79,2027.47
-Relu5,18.3751,115.933,90.1275,25.8055,12618,9809.37,2808.64,12.9942,82.009,63.7545,18.2548,8924.67,6938.12,1986.59
-Relu6,18.3515,122.286,98.2308,24.0552,13327.1,10705.5,2621.61,12.9773,86.4993,69.4836,17.016,9426.46,7572.14,1854.36
-Relu7,18.3889,125.084,101.453,23.631,13603.6,11033.6,2570.02,13.0046,88.4856,71.7689,16.7172,9621.58,7803.86,1817.78
-Relu8,9.34284,60.6176,48.9763,11.6413,12976.3,10484.3,2492.01,6.60743,42.914,34.6725,8.24179,9185.09,7421.14,1764
-Relu9,9.34283,63.4928,52.2617,11.2311,13592.4,11188.1,2404.33,6.60725,44.9455,36.9955,7.95121,9621.07,7919.28,1702.05
-Softmax1,2.09116,9.17789,7.4508,1.72709,8772.87,7122.11,1650.76,1.47902,6.75862,5.48652,1.27228,6456.75,5241.62,1215.29
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_layers.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_layers.txt
deleted file mode 100644
index af6469192145b246beaec42cf42a6629e5ed1a93..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_layers.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Conv1,2000,3,32,32,64,3,3,3,1,1
-Conv2,2000,64,32,32,64,64,3,3,1,1
-Conv3,2000,64,16,16,128,64,3,3,1,1
-Conv4,2000,128,16,16,128,128,3,3,1,1
-Conv5,2000,128,8,8,256,128,3,3,1,1
-Conv6,2000,256,8,8,256,256,3,3,1,1
-Conv7,2000,256,8,8,256,256,3,3,1,1
-Conv8,2000,256,4,4,512,256,3,3,1,1
-Conv9,2000,512,4,4,512,512,3,3,1,1
-Conv10,2000,512,4,4,512,512,3,3,1,1
-Conv11,2000,512,2,2,512,512,3,3,1,1
-Conv12,2000,512,2,2,512,512,3,3,1,1
-Conv13,2000,512,2,2,512,512,3,3,1,1
-FC1,2000,512,512,512
-FC2,2000,512,512,10
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive1.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive1.txt
deleted file mode 100644
index 90a09b7a87bfde672e576b124225a9efbebe069c..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive1.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-8 8 8,7,7,7,7,7,7,7,7,7,7,7,7,7,7
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive_results1.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive_results1.csv
deleted file mode 100644
index 3036f34f281fb2142622187df7d369b1a9dbcd3e..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_naive_results1.csv
+++ /dev/null
@@ -1,14 +0,0 @@
-Time
-Configuration,Total,Improvement
-c0,9504.422158,0.999999989478582
-c1,1323.9068,7.17907139693886
-
-c1,1323.9068
-
-Energy
-Configuration,Total,Improvement
-c0,58514.286843,0.999999998291016
-c1,4256.555187,13.7468640479566
-
-c1,4256.555187
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_ops.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_ops.txt
deleted file mode 100644
index 2075774fde3e66afd1a1946cac46b87038a6486f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_ops.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-#Conv1,3
-Conv1
-Add1
-Relu1
-#Conv2,4
-Conv2
-Add2
-Relu2
-Pool1
-#Conv3,3
-Conv3
-Add3
-Relu3
-#Conv4,4
-Conv4
-Add4
-Relu4
-Pool2
-#Conv5,3
-Conv5
-Add5
-Relu5
-#Conv6,3
-Conv6
-Add6
-Relu6
-#Conv7,4
-Conv7
-Add7
-Relu7
-Pool3
-#Conv8,3
-Conv8
-Add8
-Relu8
-#Conv9,3
-Conv9
-Add9
-Relu9
-#Conv10,4
-Conv10
-Add10
-Relu10
-Pool4
-#Conv11,3
-Conv11
-Add11
-Relu11
-#Conv12,3
-Conv12
-Add12
-Relu12
-#Conv13,4
-Conv13
-Add13
-Relu13
-Pool5
-#FC1,3
-Mul1
-Add14
-Relu14
-#FC2,2
-Mul2
-Add15
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs1.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs1.txt
deleted file mode 100644
index e7b8720b064ac873815b3222371f587b6d3eace9..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs1.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-8 8 8,8 8 8 8,8 8 8,7,7,5,7,6,8 8 8,7,7,6,7,7,7
-8 8 8,5,8 8 8,7,7,6,8 8 8 8,8 8 8,7,5,8 8 8,6,7,9 9 9,9 9
-9 9 9,5,9 9 9,9 9 9 9,6,6,9 9 9 9,6,6,8 8 8 8,9 9 9,6,9 9 9 9,6,8 8
-8 8 8,8 8 8 8,8 8 8,7,6,5,7,7,5,5,5,8 8 8,7,7,5
-8 8 8,8 8 8 8,8 8 8,7,7,5,9 9 9 9,6,5,5,7,6,7,7,5
-8 8 8,8 8 8 8,8 8 8,5,7,5,7,8 8 8,6,5,7,6,7,9 9 9,7
-8 8 8,6,8 8 8,8 8 8 8,7,8 8 8,7,6,9 9 9,7,8 8 8,8 8 8,9 9 9 9,9 9 9,6
-9 9 9,8 8 8 8,8 8 8,7,7,6,7,6,5,9 9 9 9,7,7,9 9 9 9,7,7
-8 8 8,8 8 8 8,8 8 8,7,7,5,7,7,5,5,7,6,8 8 8 8,8 8 8,8 8
-9 9 9,9 9 9 9,8 8 8,7,7,5,7,6,7,5,7,9 9 9,7,7,5
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,9 9 9 9,7,6,9 9 9 9,7,5,8 8 8 8,6,8 8
-8 8 8,8 8 8 8,8 8 8,7,5,5,7,6,9 9 9,6,7,6,9 9 9 9,7,7
-8 8 8,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8 8,6,5,5,5,6,7,9 9 9,7
-9 9 9,8 8 8 8,8 8 8,5,5,5,9 9 9 9,6,9 9 9,6,7,6,7,7,6
-9 9 9,8 8 8 8,8 8 8,7,7,9 9 9,9 9 9 9,6,5,5,7,6,7,7,6
-8 8 8,8 8 8 8,8 8 8,7,5,5,7,6,9 9 9,9 9 9 9,7,6,9 9 9 9,7,7
-9 9 9,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8 8,6,7,5,7,6,7,7,6
-8 8 8,8 8 8 8,8 8 8,7,7,6,7,6,7,5,8 8 8,6,7,7,7
-8 8 8,8 8 8 8,8 8 8,7,7,5,7,6,5,5,9 9 9,6,7,7,7
-8 8 8,8 8 8 8,8 8 8,7,5,5,7,9 9 9,5,5,7,6,8 8 8 8,7,5
-9 9 9,8 8 8 8,8 8 8,5,6,6,8 8 8 8,6,6,7,7,5,8 8 8 8,7,8 8
-9 9 9,9 9 9 9,8 8 8,5,6,6,8 8 8 8,7,6,7,7,5,8 8 8 8,7,8 8
-8 8 8,8 8 8 8,8 8 8,7,7,5,7,9 9 9,7,5,7,9 9 9,7,9 9 9,7
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,7,5,8 8 8 8,9 9 9,5,5,7,8 8 8,7,7,7
-8 8 8,8 8 8 8,8 8 8,7,5,8 8 8,6,8 8 8,7,5,6,9 9 9,7,5,7
-8 8 8,8 8 8 8,8 8 8,7,7,8 8 8,9 9 9 9,9 9 9,5,5,7,6,7,7,7
-9 9 9,8 8 8 8,8 8 8,6,7,5,9 9 9 9,6,7,5,7,9 9 9,7,7,8 8
-8 8 8,8 8 8 8,9 9 9,8 8 8 8,8 8 8,8 8 8,9 9 9 9,5,8 8 8,5,7,6,8 8 8 8,5,6
-9 9 9,8 8 8 8,8 8 8,7,7,7,9 9 9 9,6,7,5,7,8 8 8,7,7,7
-8 8 8,9 9 9 9,8 8 8,7,5,8 8 8,9 9 9 9,7,8 8 8,8 8 8 8,7,7,9 9 9 9,6,7
-8 8 8,8 8 8 8,9 9 9,8 8 8 8,8 8 8,8 8 8,9 9 9 9,5,8 8 8,5,7,6,8 8 8 8,5,7
-9 9 9,8 8 8 8,8 8 8,5,9 9 9,9 9 9,6,6,9 9 9,6,9 9 9,6,5,7,7
-8 8 8,8 8 8 8,8 8 8,7,7,5,8 8 8 8,5,5,7,9 9 9,6,7,7,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,5,5,6,6,5,7,8 8 8,8 8 8,9 9 9 9,5,6
-8 8 8,9 9 9 9,9 9 9,7,6,5,6,9 9 9,6,8 8 8 8,8 8 8,6,7,7,8 8
-8 8 8,8 8 8 8,9 9 9,5,5,7,7,8 8 8,9 9 9,5,6,9 9 9,9 9 9 9,7,7
-8 8 8,8 8 8 8,8 8 8,7,7,5,7,9 9 9,5,5,7,8 8 8,8 8 8 8,9 9 9,7
-8 8 8,8 8 8 8,8 8 8,7,7,9 9 9,8 8 8 8,9 9 9,5,8 8 8 8,8 8 8,6,7,7,7
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,8 8 8,9 9 9,6,6,7,7,8 8 8,8 8 8,9 9 9 9,5,6
-8 8 8,8 8 8 8,8 8 8,7,5,9 9 9,8 8 8 8,9 9 9,5,6,9 9 9,8 8 8,5,7,7
-8 8 8,9 9 9 9,8 8 8,6,7,6,9 9 9 9,8 8 8,6,9 9 9 9,9 9 9,5,5,6,5
-8 8 8,8 8 8 8,9 9 9,8 8 8 8,8 8 8,8 8 8,8 8 8 8,6,8 8 8,9 9 9 9,9 9 9,6,8 8 8 8,7,6
-8 8 8,8 8 8 8,8 8 8,6,7,7,6,8 8 8,6,8 8 8 8,8 8 8,9 9 9,6,6,8 8
-8 8 8,9 9 9 9,8 8 8,6,7,6,9 9 9 9,8 8 8,6,9 9 9 9,9 9 9,8 8 8,9 9 9 9,6,5
-8 8 8,9 9 9 9,8 8 8,9 9 9 9,7,5,9 9 9 9,8 8 8,6,9 9 9 9,9 9 9,8 8 8,9 9 9 9,9 9 9,5
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,7,6,9 9 9 9,8 8 8,7,9 9 9 9,9 9 9,8 8 8,9 9 9 9,9 9 9,5
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,7,7,9 9 9 9,8 8 8,6,9 9 9 9,9 9 9,8 8 8,9 9 9 9,6,5
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs2.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs2.txt
deleted file mode 100644
index f846e81c4b2ab1c7681debe4ec84de99be49fade..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_confs2.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-8 8 8,7,8 8 8,6,5,5,6,6,7,7,6,5,5,9 9 9,5
-8 8 8,5,5,6,5,9 9 9,6,6,7,7,6,5,5,6,5
-8 8 8,7,8 8 8,8 8 8 8,5,5,8 8 8 8,6,7,7,6,5,7,6,5
-8 8 8,8 8 8 8,5,6,5,5,9 9 9 9,6,7,7,6,5,9 9 9 9,6,5
-9 9 9,7,5,6,5,5,9 9 9 9,6,7,6,6,5,5,6,5
-8 8 8,8 8 8 8,5,6,7,5,6,6,7,7,9 9 9,5,5,6,5
-9 9 9,7,7,5,6,7,6,7,9 9 9,7,5,7,5,6,8 8
-8 8 8,7,5,6,5,5,8 8 8 8,6,7,7,6,5,5,9 9 9,5
-8 8 8,5,8 8 8,7,9 9 9,8 8 8,7,7,5,6,6,7,6,6,6
-8 8 8,8 8 8 8,6,6,6,7,6,6,7,5,9 9 9,5,9 9 9 9,9 9 9,6
-8 8 8,9 9 9 9,7,7,8 8 8,6,9 9 9 9,9 9 9,7,5,6,5,5,7,9 9
-8 8 8,7,7,6,5,5,6,7,7,7,8 8 8,5,5,6,5
-8 8 8,7,5,6,5,5,7,8 8 8,7,7,6,9 9 9,5,6,9 9
-8 8 8,8 8 8 8,9 9 9,9 9 9 9,6,6,7,6,7,6,5,5,5,6,7
-9 9 9,7,6,6,5,5,6,7,6,5,5,9 9 9,8 8 8 8,5,6
-8 8 8,6,6,6,5,8 8 8,7,8 8 8,8 8 8,7,5,9 9 9,5,9 9 9,6
-8 8 8,6,5,6,5,9 9 9,8 8 8 8,6,7,7,6,5,9 9 9 9,5,5
-8 8 8,7,5,6,5,5,6,9 9 9,7,5,6,9 9 9,5,6,6
-9 9 9,7,5,7,5,5,6,6,7,6,9 9 9,5,9 9 9 9,6,5
-9 9 9,6,5,7,9 9 9,9 9 9,5,7,8 8 8,5,7,5,5,6,7
-8 8 8,7,5,6,5,5,6,6,7,5,6,9 9 9,9 9 9 9,6,5
-9 9 9,6,8 8 8,8 8 8 8,5,7,5,7,7,5,9 9 9,6,9 9 9 9,5,7
-9 9 9,7,6,6,5,5,6,7,6,8 8 8 8,5,9 9 9,8 8 8 8,5,6
-9 9 9,5,5,8 8 8 8,5,8 8 8,7,5,9 9 9,5,9 9 9,7,8 8 8 8,8 8 8,5
-8 8 8,7,5,6,5,6,9 9 9 9,9 9 9,7,7,6,9 9 9,6,6,5
-8 8 8,6,5,6,5,8 8 8,9 9 9 9,7,7,7,9 9 9,5,5,6,5
-8 8 8,6,6,6,7,8 8 8,8 8 8 8,9 9 9,7,7,8 8 8,7,6,5,5
-8 8 8,6,6,7,7,8 8 8,8 8 8 8,9 9 9,6,7,8 8 8,7,6,5,5
-8 8 8,7,6,6,5,7,6,7,6,7,8 8 8,5,9 9 9 9,6,9 9
-8 8 8,7,7,7,6,9 9 9,9 9 9 9,7,9 9 9,7,6,9 9 9,6,6,6
-8 8 8,7,6,7,5,7,9 9 9 9,5,6,7,8 8 8,7,7,8 8 8,5
-9 9 9,6,5,7,7,6,5,5,6,8 8 8 8,6,8 8 8,8 8 8 8,5,6
-8 8 8,6,5,5,8 8 8,5,8 8 8 8,7,9 9 9,6,9 9 9,9 9 9,5,7,6
-8 8 8,7,5,6,5,5,6,6,7,8 8 8 8,6,9 9 9,8 8 8 8,6,6
-8 8 8,7,5,7,7,6,5,6,6,8 8 8 8,9 9 9,5,7,5,6
-8 8 8,7,6,7,5,7,5,8 8 8,5,6,8 8 8,8 8 8,5,6,7
-9 9 9,8 8 8 8,5,8 8 8 8,9 9 9,8 8 8,7,5,9 9 9,7,8 8 8,7,6,8 8 8,5
-8 8 8,7,6,7,5,7,5,8 8 8,5,6,8 8 8,8 8 8,5,8 8 8,7
-9 9 9,6,9 9 9,5,6,9 9 9,7,8 8 8,6,5,7,7,5,7,7
-8 8 8,6,6,6,8 8 8,9 9 9,8 8 8 8,6,6,8 8 8 8,7,6,6,6,5
-8 8 8,7,9 9 9,6,5,8 8 8,9 9 9 9,6,7,5,6,5,5,6,5
-8 8 8,5,8 8 8,6,5,5,6,6,9 9 9,7,9 9 9,5,5,6,5
-8 8 8,6,9 9 9,6,5,8 8 8,6,6,7,7,7,8 8 8,5,6,8 8
-8 8 8,6,5,9 9 9 9,9 9 9,6,7,7,7,9 9 9 9,8 8 8,5,8 8 8 8,6,8 8
-8 8 8,7,8 8 8,6,5,5,8 8 8 8,6,7,7,6,8 8 8,5,6,5
-8 8 8,6,8 8 8,6,6,6,7,7,6,5,9 9 9,5,9 9 9 9,7,8 8
-9 9 9,7,8 8 8,6,5,5,9 9 9 9,6,5,9 9 9 9,7,6,8 8 8 8,6,9 9
-8 8 8,7,9 9 9,7,5,5,9 9 9 9,7,7,7,6,8 8 8,5,8 8 8,8 8
-8 8 8,6,8 8 8,7,9 9 9,8 8 8,7,9 9 9,9 9 9,6,6,7,8 8 8 8,6,5
-9 9 9,6,9 9 9,7,9 9 9,8 8 8,5,7,8 8 8,9 9 9 9,8 8 8,5,5,6,8 8
-8 8 8,6,5,8 8 8 8,8 8 8,8 8 8,5,8 8 8,6,7,8 8 8,7,6,5,9 9
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results1.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results1.csv
deleted file mode 100644
index 3cc252f7f64c7b9fecac6fd7a7793d5560c18285..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results1.csv
+++ /dev/null
@@ -1,572 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,157.113483,59.008124,3203.344,157.113483,39.466907,29.645681,39.466907,1.117251,0.021821,13437.241376,4.35463535782335
-c2,1129.478,86.311660,1949.97,157.113483,78.556742,118.016247,3366.9575,1683.0934,157.113483,77.680494,1299.926,29.645681,39.466907,69.37799,1.812953,10244.52054,5.71176425908397
-c3,1108.984,86.311660,3792.343,6973.454,59.008124,118.016247,6530.41,59.008124,118.016247,3427.0759,2479.9481,29.645681,2353.8847,0.839227,10.00372,27146.94873,2.1554645868097
-c4,1129.478,6358.145,1949.97,157.113483,59.008124,77.680494,157.113483,78.556742,77.680494,77.680494,19.513340,1273.3007,39.466907,1.117251,0.010789,11455.835301,5.10781490784096
-c5,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,6530.41,59.008124,77.680494,77.680494,39.466907,29.645681,39.466907,1.117251,0.010789,16605.430366,3.52380427371692
-c6,1129.478,6358.145,1949.97,77.680494,78.556742,77.680494,157.113483,1683.0934,118.016247,77.680494,39.466907,29.645681,39.466907,69.37799,0.021821,11885.39366,4.92320978375394
-c7,1129.478,131.129164,1949.97,3356.913,78.556742,3053.2545,157.113483,59.008124,6053.9348,157.113483,1299.926,1273.3007,2353.8847,69.37799,0.016391,21122.977077,2.77017232716201
-c8,1108.984,6358.145,1949.97,157.113483,78.556742,118.016247,157.113483,59.008124,77.680494,6493.6098,39.466907,39.466907,2353.8847,1.117251,0.021821,18992.154959,3.08097141484064
-c9,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,157.113483,78.556742,77.680494,77.680494,39.466907,29.645681,1286.6509,57.16078,10.00372,11564.90292,5.05964353888719
-c10,1108.984,7471.505,1949.97,157.113483,78.556742,77.680494,157.113483,59.008124,157.113483,77.680494,39.466907,2376.5679,39.466907,1.117251,0.010789,13751.355057,4.25516512190538
-c11,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,6530.41,78.556742,118.016247,6493.6098,39.466907,19.513340,1286.6509,0.839227,10.00372,30334.55093,1.92896498732192
-c12,1129.478,6358.145,1949.97,157.113483,38.840247,77.680494,157.113483,59.008124,6053.9348,118.016247,39.466907,29.645681,2353.8847,1.117251,0.021821,18523.436238,3.15893259626782
-c13,1129.478,6358.145,1949.97,157.113483,78.556742,3053.2545,3366.9575,59.008124,77.680494,77.680494,19.513340,29.645681,39.466907,69.37799,0.021821,16465.870076,3.55367109163099
-c14,1108.984,6358.145,1949.97,77.680494,38.840247,77.680494,6530.41,59.008124,6053.9348,118.016247,39.466907,29.645681,39.466907,1.117251,0.016391,22482.382543,2.60267284709785
-c15,1108.984,6358.145,1949.97,157.113483,78.556742,6007.756,6530.41,59.008124,77.680494,77.680494,39.466907,29.645681,39.466907,1.117251,0.016391,22515.017474,2.59890034065847
-c16,1129.478,6358.145,1949.97,157.113483,38.840247,77.680494,157.113483,59.008124,6053.9348,6493.6098,39.466907,29.645681,2353.8847,1.117251,0.021821,24899.029791,2.35006291807982
-c17,1108.984,6358.145,1949.97,157.113483,78.556742,3053.2545,3366.9575,59.008124,157.113483,77.680494,39.466907,29.645681,39.466907,1.117251,0.016391,16476.496463,3.55137917938216
-c18,1129.478,6358.145,1949.97,157.113483,78.556742,118.016247,157.113483,59.008124,157.113483,77.680494,1299.926,29.645681,39.466907,1.117251,0.021821,11612.372716,5.03896040629841
-c19,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,157.113483,59.008124,77.680494,77.680494,2479.9481,29.645681,39.466907,1.117251,0.021821,12672.626074,4.6173765437074
-c20,1129.478,6358.145,1949.97,157.113483,38.840247,77.680494,157.113483,3324.8076,77.680494,77.680494,39.466907,29.645681,1286.6509,1.117251,0.010789,14705.400823,3.97910177011772
-c21,1108.984,6358.145,1949.97,77.680494,59.008124,118.016247,3366.9575,59.008124,118.016247,157.113483,39.466907,19.513340,1286.6509,1.117251,10.00372,14729.651337,3.97255068073204
-c22,1108.984,7471.505,1949.97,77.680494,59.008124,118.016247,3366.9575,78.556742,118.016247,157.113483,39.466907,19.513340,1286.6509,1.117251,10.00372,15862.559955,3.68882996440136
-c23,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,157.113483,3324.8076,157.113483,77.680494,39.466907,2376.5679,39.466907,69.37799,0.021821,15992.560304,3.65884419785369
-c24,1129.478,6358.145,1949.97,3356.913,78.556742,77.680494,3366.9575,3324.8076,77.680494,77.680494,39.466907,1273.3007,39.466907,1.117251,0.021821,21151.24291,2.76647035899192
-c25,1129.478,6358.145,1949.97,157.113483,38.840247,3053.2545,118.016247,1683.0934,157.113483,77.680494,29.645681,2376.5679,39.466907,0.552395,0.021821,17168.959558,3.40814399990362
-c26,1129.478,6358.145,1949.97,157.113483,78.556742,3053.2545,6530.41,3324.8076,77.680494,77.680494,39.466907,29.645681,39.466907,1.117251,0.021821,22846.81488,2.56115729453857
-c27,1108.984,6358.145,1949.97,118.016247,78.556742,77.680494,6530.41,59.008124,157.113483,77.680494,39.466907,2376.5679,39.466907,1.117251,10.00372,18982.187269,3.08258925620765
-c28,1129.478,6358.145,3792.343,3356.913,1731.6018,3053.2545,6530.41,38.840247,3203.344,77.680494,39.466907,29.645681,1286.6509,0.552395,0.016391,30628.342315,1.91046208280416
-c29,1108.984,6358.145,1949.97,157.113483,78.556742,157.113483,6530.41,59.008124,157.113483,77.680494,39.466907,1273.3007,39.466907,1.117251,0.021821,17987.468395,3.25305847564182
-c30,1129.478,7471.505,1949.97,157.113483,38.840247,3053.2545,6530.41,78.556742,3203.344,3427.0759,39.466907,39.466907,2353.8847,0.839227,0.021821,29473.227434,1.9853369223136
-c31,1129.478,6358.145,3792.343,3356.913,1731.6018,3053.2545,6530.41,38.840247,3203.344,77.680494,39.466907,29.645681,1286.6509,0.552395,0.021821,30628.347745,1.91046174410457
-c32,1108.984,6358.145,1949.97,77.680494,3475.891,6007.756,118.016247,59.008124,6053.9348,118.016247,2479.9481,29.645681,19.513340,1.117251,0.021821,27857.648105,2.1004747569645
-c33,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,3366.9575,38.840247,77.680494,157.113483,2479.9481,29.645681,39.466907,1.117251,10.00372,15951.717102,3.6682124000834
-c34,1129.478,7471.505,1949.97,3356.913,38.840247,77.680494,118.016247,59.008124,77.680494,157.113483,1299.926,1273.3007,2353.8847,0.552395,0.016391,19363.885275,3.02182571884802
-c35,1129.478,7471.505,3792.343,157.113483,59.008124,77.680494,118.016247,3324.8076,118.016247,3427.0759,1299.926,29.645681,39.466907,1.117251,10.00372,21055.203654,2.77908908062139
-c36,1129.478,6358.145,3792.343,77.680494,38.840247,157.113483,157.113483,1683.0934,6053.9348,77.680494,29.645681,2376.5679,2353.8847,1.117251,0.021821,24286.659754,2.40931800398904
-c37,1129.478,6358.145,1949.97,157.113483,78.556742,77.680494,157.113483,3324.8076,77.680494,77.680494,39.466907,1273.3007,1286.6509,69.37799,0.021821,16057.044108,3.64415057248499
-c38,1129.478,6358.145,1949.97,157.113483,78.556742,6007.756,3366.9575,3324.8076,77.680494,3427.0759,1299.926,29.645681,39.466907,1.117251,0.021821,27247.718379,2.14749307866262
-c39,1108.984,7471.505,3792.343,6973.454,1731.6018,6007.756,118.016247,59.008124,157.113483,157.113483,1299.926,1273.3007,2353.8847,0.552395,0.016391,32504.575323,1.800186160918
-c40,1129.478,6358.145,1949.97,157.113483,38.840247,6007.756,3366.9575,3324.8076,77.680494,118.016247,2479.9481,1273.3007,19.513340,1.117251,0.021821,26302.665783,2.22465232624268
-c41,1129.478,7471.505,1949.97,118.016247,78.556742,118.016247,6530.41,1683.0934,118.016247,6493.6098,2479.9481,19.513340,19.513340,0.839227,0.010789,28210.496479,2.07420265287206
-c42,1129.478,6358.145,3792.343,3356.913,1731.6018,3053.2545,3366.9575,59.008124,3203.344,6493.6098,2479.9481,29.645681,1286.6509,1.117251,0.016391,36342.033047,1.61009942966909
-c43,1129.478,6358.145,1949.97,118.016247,78.556742,157.113483,118.016247,1683.0934,118.016247,3427.0759,1299.926,2376.5679,29.645681,0.839227,10.00372,18854.463794,3.10347126133991
-c44,1129.478,7471.505,1949.97,118.016247,78.556742,118.016247,6530.41,1683.0934,118.016247,6493.6098,2479.9481,1273.3007,2353.8847,0.839227,0.010789,31798.655199,1.84014972623198
-c45,1129.478,7471.505,1949.97,6973.454,78.556742,77.680494,6530.41,1683.0934,118.016247,6493.6098,2479.9481,1273.3007,2353.8847,69.37799,0.010789,38682.295962,1.51268908001772
-c46,1129.478,7471.505,1949.97,3356.913,78.556742,118.016247,6530.41,1683.0934,157.113483,6493.6098,2479.9481,1273.3007,2353.8847,69.37799,0.010789,35145.187951,1.66493025327076
-c47,1108.984,6358.145,1949.97,6973.454,78.556742,157.113483,6530.41,1683.0934,118.016247,6493.6098,2479.9481,1273.3007,2353.8847,0.839227,0.010789,37559.336188,1.55791589058764
-c2,10244.52054
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,3486.330184,2.72619671223325
-c2,442.9824,90.532480,426.5549,82.574848,41.518080,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,12.419968,1.04789,2478.077068,3.83540201279156
-c3,352.4661,90.532480,701.7255,1159.5933,41.518080,82.574848,971.2723,41.518080,83.036160,543.5573,344.11179,21.220352,351.94343,0.922624,1.669613,4787.661957,1.98519069325364
-c4,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,0.922624,0.018020,3227.150232,2.94514391342585
-c5,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,3930.519476,2.41810833764437
-c6,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,21.220352,21.220352,21.220352,12.419968,0.018020,3302.485858,2.87795990017045
-c7,442.9824,90.532480,426.5549,652.9992,41.518080,544.1687,82.574848,41.518080,857.91333,83.036160,208.37216,206.54856,351.94343,12.419968,0.018020,4043.100316,2.35077568699199
-c8,352.4661,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,883.36663,21.220352,21.220352,351.94343,0.922624,0.018020,4082.359272,2.3281689072228
-c9,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,3239.302253,2.93409540767249
-c10,352.4661,1550.099,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,3213.556852,2.95760190342505
-c11,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,971.2723,41.518080,83.036160,883.36663,21.220352,21.220352,210.33129,0.922624,1.669613,5723.546549,1.66058263186526
-c12,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,857.91333,83.036160,21.220352,21.220352,351.94343,0.922624,0.018020,4147.422272,2.29164558260718
-c13,442.9824,1610.85,426.5549,82.574848,41.518080,544.1687,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,12.419968,0.018020,4022.640272,2.36273225520146
-c14,352.4661,1610.85,426.5549,82.574848,41.518080,82.574848,971.2723,41.518080,857.91333,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,4614.880346,2.05951644234642
-c15,352.4661,1610.85,426.5549,82.574848,41.518080,925.6073,971.2723,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,4683.035628,2.02954295248546
-c16,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,857.91333,883.36663,21.220352,21.220352,351.94343,0.922624,0.018020,4947.752742,1.92095734397236
-c17,352.4661,1610.85,426.5549,82.574848,41.518080,544.1687,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,3920.626628,2.42420990759516
-c18,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,3228.973832,2.9434806096787
-c19,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,21.220352,0.922624,0.018020,3364.713462,2.82473440394449
-c20,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,490.07014,83.036160,83.036160,21.220352,21.220352,210.33129,0.922624,0.018020,3679.485022,2.58308481835465
-c21,352.4661,1610.85,426.5549,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,210.33129,0.922624,1.669613,3649.795307,2.60409724330611
-c22,352.4661,1550.099,426.5549,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,210.33129,0.922624,1.669613,3589.044307,2.64817624977355
-c23,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,490.07014,83.036160,83.036160,21.220352,344.22248,21.220352,12.419968,0.018020,3824.873556,2.48489832941033
-c24,442.9824,1610.85,426.5549,652.9992,41.518080,82.574848,590.3019,490.07014,83.036160,83.036160,21.220352,206.54856,21.220352,0.922624,0.018020,4753.853696,1.99930889039903
-c25,442.9824,1610.85,426.5549,82.574848,41.518080,544.1687,82.574848,290.68457,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,4075.584494,2.33203898454034
-c26,442.9824,1610.85,426.5549,82.574848,41.518080,544.1687,971.2723,490.07014,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,4840.665388,1.96345361635945
-c27,352.4661,1610.85,426.5549,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,1.669613,4164.656897,2.28216205196406
-c28,442.9824,1610.85,701.7255,652.9992,333.7788,544.1687,971.2723,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,0.922624,0.018020,6163.588098,1.54202744451423
-c29,352.4661,1610.85,426.5549,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,0.922624,0.018020,4025.331384,2.3611526642659
-c30,442.9824,1550.099,426.5549,82.574848,41.518080,544.1687,971.2723,41.518080,527.54432,543.5573,21.220352,21.220352,351.94343,0.922624,0.018020,5567.114706,1.70724378591161
-c31,442.9824,1610.85,701.7255,652.9992,333.7788,544.1687,971.2723,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,0.922624,0.018020,6163.588098,1.54202744451423
-c32,352.4661,1610.85,426.5549,82.574848,558.563,925.6073,82.574848,41.518080,857.91333,83.036160,344.11179,21.220352,21.220352,0.922624,0.018020,5409.151704,1.75710028159528
-c33,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,344.11179,21.220352,21.220352,0.922624,1.669613,3874.092107,2.45332884458111
-c34,442.9824,1550.099,426.5549,652.9992,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,208.37216,206.54856,351.94343,0.922624,0.018020,4254.69847,2.23386498517567
-c35,442.9824,1550.099,701.7255,82.574848,41.518080,82.574848,82.574848,490.07014,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,1.669613,4354.118225,2.18285803199893
-c36,442.9824,1610.85,701.7255,82.574848,41.518080,82.574848,82.574848,290.68457,857.91333,83.036160,21.220352,344.22248,351.94343,0.922624,0.018020,4994.76149,1.9028780426735
-c37,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,490.07014,83.036160,83.036160,21.220352,206.54856,210.33129,12.419968,0.018020,3876.310574,2.45192477005263
-c38,442.9824,1610.85,426.5549,82.574848,41.518080,925.6073,590.3019,490.07014,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,5488.806536,1.73160083571944
-c39,352.4661,1550.099,701.7255,1159.5933,333.7788,925.6073,82.574848,41.518080,83.036160,83.036160,208.37216,206.54856,351.94343,0.922624,0.018020,6081.240042,1.56290854103225
-c40,442.9824,1610.85,426.5549,82.574848,41.518080,925.6073,590.3019,490.07014,83.036160,83.036160,344.11179,206.54856,21.220352,0.922624,0.018020,5349.353234,1.77674226482494
-c41,442.9824,1550.099,426.5549,82.574848,41.518080,82.574848,971.2723,290.68457,83.036160,883.36663,344.11179,21.220352,21.220352,0.922624,0.018020,5242.156874,1.81307469523327
-c42,442.9824,1610.85,701.7255,652.9992,333.7788,544.1687,590.3019,41.518080,527.54432,883.36663,344.11179,21.220352,210.33129,0.922624,0.018020,6905.839606,1.37628768732386
-c43,442.9824,1610.85,426.5549,82.574848,41.518080,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,4263.315183,2.22935005438114
-c44,442.9824,1550.099,426.5549,82.574848,41.518080,82.574848,971.2723,290.68457,83.036160,883.36663,344.11179,206.54856,351.94343,0.922624,0.018020,5758.20816,1.65058673268618
-c45,442.9824,1550.099,426.5549,1159.5933,41.518080,82.574848,971.2723,290.68457,83.036160,883.36663,344.11179,206.54856,351.94343,12.419968,0.018020,6846.723956,1.38817076316534
-c46,442.9824,1550.099,426.5549,652.9992,41.518080,82.574848,971.2723,290.68457,83.036160,883.36663,344.11179,206.54856,351.94343,12.419968,0.018020,6340.129856,1.49908948617141
-c47,352.4661,1610.85,426.5549,1159.5933,41.518080,82.574848,971.2723,290.68457,83.036160,883.36663,344.11179,206.54856,351.94343,0.922624,0.018020,6805.461312,1.39658747329622
-c2,2478.077068
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,352.195669,152.266559,3203.344,357.330128,89.882488,80.061262,89.882488,3.135222,0.543719,14609.879831,4.00511757244776
-c2,1145.5648,384.623163,1949.97,375.615141,179.180675,313.098433,3366.9575,1683.0934,357.330128,277.897139,1299.926,80.061262,89.882488,69.37799,1.812953,11574.391072,5.0554958764962
-c3,1108.984,384.623163,3792.343,6973.454,159.632057,313.098433,6530.41,152.266559,318.232892,3427.0759,2492.7759,80.061262,2353.8847,2.857198,10.00372,28099.702784,2.08238098048781
-c4,1145.5648,6358.145,1949.97,375.615141,159.632057,272.76268,352.195669,171.815177,277.897139,277.897139,69.928921,1273.3007,89.882488,3.135222,0.532687,12778.27482,4.57920080835137
-c5,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,6530.41,152.266559,277.897139,277.897139,89.882488,80.061262,89.882488,3.135222,0.532687,17783.20328,3.29042442987569
-c6,1145.5648,6358.145,1949.97,296.182152,179.180675,272.76268,352.195669,1683.0934,318.232892,277.897139,89.882488,80.061262,89.882488,69.37799,0.543719,13162.972354,4.44537030275548
-c7,1145.5648,429.440667,1949.97,3356.913,179.180675,3053.2545,352.195669,152.266559,6053.9348,357.330128,1299.926,1273.3007,2365.9264,69.37799,0.538289,22039.120177,2.65501917079991
-c8,1108.984,6756.043,1949.97,375.615141,179.180675,313.098433,352.195669,152.266559,277.897139,6493.6098,89.882488,89.882488,2353.8847,3.135222,0.543719,20496.189033,2.85488616753584
-c9,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,352.195669,171.815177,277.897139,277.897139,89.882488,80.061262,1286.6509,57.16078,10.00372,12884.80257,4.54134132602902
-c10,1108.984,7471.505,2073.98,375.615141,179.180675,272.76268,352.195669,152.266559,357.330128,277.897139,89.882488,2376.5679,89.882488,3.135222,0.532687,15181.717776,3.85425992769252
-c11,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,6530.41,171.815177,318.232892,6493.6098,89.882488,69.928921,1286.6509,2.857198,10.00372,31502.999129,1.85741955607626
-c12,1145.5648,6358.145,1949.97,375.615141,139.46418,272.76268,352.195669,152.266559,6053.9348,318.232892,89.882488,80.061262,2353.8847,3.135222,0.543719,19645.659112,2.97848426522935
-c13,1145.5648,6358.145,1949.97,375.615141,179.180675,3053.2545,3366.9575,152.266559,277.897139,277.897139,69.928921,80.061262,89.882488,69.37799,0.543719,17446.542833,3.35391871430991
-c14,1108.984,6756.043,1949.97,296.182152,139.46418,272.76268,6530.41,152.266559,6053.9348,318.232892,89.882488,80.061262,89.882488,3.135222,0.538289,23841.750012,2.4542781703575
-c15,1108.984,6756.043,1949.97,375.615141,179.180675,6007.756,6530.41,152.266559,277.897139,277.897139,89.882488,80.061262,89.882488,3.135222,0.538289,23879.519402,2.45039632552486
-c16,1145.5648,6358.145,1949.97,375.615141,139.46418,272.76268,352.195669,152.266559,6053.9348,6493.6098,89.882488,80.061262,2353.8847,3.135222,0.543719,25821.03602,2.26614790247232
-c17,1108.984,6756.043,1949.97,375.615141,179.180675,3053.2545,3366.9575,152.266559,357.330128,277.897139,89.882488,80.061262,89.882488,3.135222,0.538289,17840.998391,3.27976524814561
-c18,1145.5648,6358.145,1949.97,375.615141,179.180675,313.098433,352.195669,152.266559,357.330128,277.897139,1299.926,80.061262,89.882488,3.135222,0.543719,12934.812235,4.5237832082548
-c19,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,352.195669,152.266559,277.897139,277.897139,2479.9481,80.061262,89.882488,3.135222,0.543719,13995.065593,4.18106553599584
-c20,1145.5648,6358.145,1949.97,375.615141,139.46418,272.76268,352.195669,3324.8076,277.897139,277.897139,89.882488,80.061262,1286.6509,3.135222,0.532687,15934.581907,3.67215699899093
-c21,1108.984,6756.043,1949.97,296.182152,159.632057,313.098433,3366.9575,152.266559,318.232892,357.330128,89.882488,69.928921,1286.6509,3.135222,10.00372,16238.297972,3.60347411924266
-c22,1108.984,7471.505,2073.98,296.182152,159.632057,313.098433,3366.9575,171.815177,318.232892,357.330128,89.882488,69.928921,1286.6509,3.135222,10.00372,17097.31859,3.42242476168057
-c23,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,352.195669,3324.8076,357.330128,277.897139,89.882488,2376.5679,89.882488,69.37799,0.543719,17219.723417,3.39809676881469
-c24,1145.5648,6358.145,1949.97,3356.913,179.180675,272.76268,3366.9575,3383.8134,277.897139,277.897139,89.882488,1273.3007,89.882488,3.135222,0.543719,22025.84595,2.6566192603984
-c25,1145.5648,6358.145,1949.97,375.615141,139.46418,3053.2545,313.098433,1683.0934,357.330128,277.897139,80.061262,2376.5679,89.882488,2.570366,0.543719,18203.058456,3.2145304956849
-c26,1145.5648,6358.145,1949.97,375.615141,179.180675,3053.2545,6654.867,3324.8076,277.897139,277.897139,89.882488,80.061262,89.882488,3.135222,0.543719,23860.704173,2.45232857226318
-c27,1108.984,6756.043,1949.97,336.517905,179.180675,272.76268,6530.41,152.266559,357.330128,277.897139,89.882488,2376.5679,89.882488,3.135222,10.00372,20490.833904,2.85563227107191
-c28,1145.5648,6358.145,3996.263,3610.731,1731.6018,3053.2545,6654.867,132.098682,3203.344,277.897139,89.882488,80.061262,1286.6509,2.570366,0.538289,31623.470226,1.85034362894989
-c29,1108.984,6756.043,1949.97,375.615141,179.180675,352.195669,6530.41,152.266559,357.330128,277.897139,89.882488,1273.3007,89.882488,3.135222,0.543719,19496.636928,3.0012502545421
-c30,1145.5648,7908.45,2073.98,375.615141,139.46418,3053.2545,6654.867,171.815177,3203.344,3427.0759,89.882488,89.882488,2353.8847,2.857198,0.543719,30690.481291,1.90659397281919
-c31,1145.5648,6358.145,3996.263,3610.731,1731.6018,3053.2545,6654.867,132.098682,3203.344,277.897139,89.882488,80.061262,1286.6509,2.570366,0.543719,31623.475656,1.85034331123131
-c32,1108.984,6756.043,1949.97,296.182152,3475.891,6007.756,313.098433,152.266559,6053.9348,318.232892,2479.9481,80.061262,69.928921,3.135222,0.543719,29065.97606,2.01315402314016
-c33,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,3366.9575,132.098682,277.897139,357.330128,2479.9481,80.061262,89.882488,3.135222,10.00372,17078.552537,3.42618534993599
-c34,1145.5648,7908.45,2073.98,3356.913,139.46418,272.76268,313.098433,152.266559,277.897139,357.330128,1299.926,1273.3007,2365.9264,2.570366,0.538289,20939.988674,2.79438004836248
-c35,1145.5648,7908.45,3792.343,375.615141,159.632057,272.76268,313.098433,3324.8076,318.232892,3427.0759,1299.926,80.061262,89.882488,3.135222,10.00372,22520.591195,2.59825712728916
-c36,1145.5648,6358.145,3996.263,296.182152,139.46418,352.195669,352.195669,1683.0934,6117.8413,277.897139,80.061262,2376.5679,2353.8847,3.135222,0.543719,25533.035112,2.29170900980466
-c37,1145.5648,6358.145,1949.97,375.615141,179.180675,272.76268,352.195669,3324.8076,277.897139,277.897139,89.882488,1273.3007,1286.6509,70.74902,0.543719,17235.16267,3.39505275487456
-c38,1145.5648,6358.145,1949.97,375.615141,179.180675,6007.756,3519.4585,3383.8134,277.897139,3427.0759,1299.926,80.061262,89.882488,3.135222,0.543719,28098.025246,2.08250530499753
-c39,1108.984,7471.505,3792.343,6973.454,1802.5061,6130.47,313.098433,152.266559,357.330128,357.330128,1299.926,1273.3007,2365.9264,2.570366,0.538289,33401.549103,1.75184349945494
-c40,1145.5648,6358.145,1949.97,375.615141,139.46418,6007.756,3519.4585,3383.8134,277.897139,318.232892,2479.9481,1307.6007,69.928921,3.135222,0.543719,27337.073714,2.14047367472935
-c41,1145.5648,7908.45,2073.98,336.517905,179.180675,313.098433,6530.41,1732.6793,318.232892,6493.6098,2479.9481,69.928921,69.928921,2.857198,0.532687,29654.919632,1.97317299698702
-c42,1145.5648,6358.145,3996.263,3610.731,1731.6018,3053.2545,3366.9575,152.266559,3203.344,6559.316,2479.9481,80.061262,1286.6509,3.135222,0.538289,37027.777932,1.58028080411498
-c43,1145.5648,6358.145,1949.97,336.517905,179.180675,352.195669,313.098433,1683.0934,318.232892,3427.0759,1299.926,2388.8133,80.061262,2.857198,10.00372,19844.736154,2.94860491437399
-c44,1145.5648,7908.45,2073.98,336.517905,179.180675,313.098433,6530.41,1732.6793,318.232892,6493.6098,2479.9481,1307.6007,2365.9264,2.857198,0.532687,33188.58889,1.76308450053815
-c45,1145.5648,7908.45,2073.98,7209.476,179.180675,272.76268,6530.41,1732.6793,318.232892,6493.6098,2479.9481,1307.6007,2365.9264,69.37799,0.532687,40087.732024,1.45965570369515
-c46,1145.5648,7908.45,2073.98,3356.913,179.180675,313.098433,6530.41,1732.6793,357.330128,6493.6098,2479.9481,1307.6007,2365.9264,69.37799,0.532687,36314.602013,1.61131565371201
-c47,1108.984,6756.043,1949.97,7209.476,179.180675,352.195669,6530.41,1732.6793,318.232892,6493.6098,2479.9481,1307.6007,2365.9264,2.857198,0.532687,38787.646421,1.5085804912479
-c2,11574.391072
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,882.906247,0
-c2,0,188.762345,0,157.595858,76.708720,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0,958.1919,0
-c3,0,188.762345,0,0,76.708720,151.959867,0,74.930729,153.127903,0,0,38.454652,0,1.156348,0,685.100564,0
-c4,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,997.579498,0
-c5,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,884.074283,0
-c6,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,38.454652,38.454652,0,0.102999,959.947073,0
-c7,0,188.762345,0,0,76.708720,0,151.959867,74.930729,0,153.127903,0,0,0,0,0.102999,645.592563,0
-c8,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,0,38.454652,38.454652,0,1.156348,0.102999,844.451595,0
-c9,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,38.454652,0,0,0,996.320151,0
-c10,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,997.579498,0
-c11,0,0,0,0,0,151.959867,0,74.930729,153.127903,0,38.454652,38.454652,0,1.156348,0,458.084151,0
-c12,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,1.156348,0.102999,844.451595,0
-c13,0,0,0,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0.102999,730.958068,0
-c14,0,0,0,157.595858,76.708720,151.959867,0,74.930729,0,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,730.94638,0
-c15,0,0,0,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,732.114416,0
-c16,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,0,0,38.454652,38.454652,0,1.156348,0.102999,691.323692,0
-c17,0,0,0,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,732.114416,0
-c18,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,997.579498,0
-c19,0,0,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,997.579498,0
-c20,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,38.454652,0,1.156348,0.102999,922.648769,0
-c21,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,0,1.156348,0,845.516632,0
-c22,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,0,1.156348,0,845.516632,0
-c23,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,0,38.454652,0,0.102999,921.492421,0
-c24,0,0,0,0,76.708720,151.959867,0,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,613.093044,0
-c25,0,0,0,157.595858,76.708720,0,151.959867,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,770.688902,0
-c26,0,0,0,157.595858,76.708720,0,0,0,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,657.183687,0
-c27,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0,845.516632,0
-c28,0,0,0,0,0,0,0,74.930729,0,153.127903,38.454652,38.454652,0,1.156348,0.102999,306.227283,0
-c29,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,845.619631,0
-c30,0,0,0,157.595858,76.708720,0,0,74.930729,0,0,38.454652,38.454652,0,1.156348,0.102999,387.403958,0
-c31,0,0,0,0,0,0,0,74.930729,0,153.127903,38.454652,38.454652,0,1.156348,0.102999,306.227283,0
-c32,0,0,0,157.595858,0,0,151.959867,74.930729,0,153.127903,0,38.454652,38.454652,1.156348,0.102999,615.783008,0
-c33,0,0,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0,845.516632,0
-c34,0,0,0,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,0,0,1.156348,0.102999,763.074336,0
-c35,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,0,0,38.454652,38.454652,1.156348,0,769.417867,0
-c36,0,0,0,157.595858,76.708720,151.959867,151.959867,0,0,153.127903,38.454652,0,0,1.156348,0.102999,731.066214,0
-c37,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,0,0,0,0.102999,883.037769,0
-c38,0,0,0,157.595858,76.708720,0,0,0,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,465.601132,0
-c39,0,0,0,0,0,0,151.959867,74.930729,153.127903,153.127903,0,0,0,1.156348,0.102999,534.405749,0
-c40,0,0,0,157.595858,76.708720,0,0,0,153.127903,153.127903,0,0,38.454652,1.156348,0.102999,580.274383,0
-c41,0,0,0,157.595858,76.708720,151.959867,0,0,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,617.560999,0
-c42,0,0,0,0,0,0,0,74.930729,0,0,0,38.454652,0,1.156348,0.102999,114.644728,0
-c43,0,0,0,157.595858,76.708720,151.959867,151.959867,0,153.127903,0,0,0,38.454652,1.156348,0,730.963215,0
-c44,0,0,0,157.595858,76.708720,151.959867,0,0,153.127903,0,0,0,0,1.156348,0.102999,540.651695,0
-c45,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,0,0,0.102999,381.899489,0
-c46,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,0,0,0.102999,381.899489,0
-c47,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,0,1.156348,0.102999,383.055837,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,273.645408,0
-c2,0,109.549158,0,60.905800,23.915213,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0,355.591832,0
-c3,0,109.549158,0,0,23.915213,43.122319,0,18.327706,47.088742,0,0,11.960929,0,0.861623,0,254.82569,0
-c4,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,308.773221,0
-c5,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,277.611831,0
-c6,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,11.960929,11.960929,0,0.418899,301.544821,0
-c7,0,109.549158,0,0,23.915213,0,43.122319,18.327706,0,47.088742,0,0,0,0,0.418899,242.422037,0
-c8,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,0,11.960929,11.960929,0,0.861623,0.418899,261.684479,0
-c9,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,11.960929,0,0,0,307.492699,0
-c10,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,308.773221,0
-c11,0,0,0,0,0,43.122319,0,18.327706,47.088742,0,11.960929,11.960929,0,0.861623,0,133.322248,0
-c12,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0.861623,0.418899,261.684479,0
-c13,0,0,0,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0.418899,233.627889,0
-c14,0,0,0,60.905800,23.915213,43.122319,0,18.327706,0,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,230.523089,0
-c15,0,0,0,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,234.489512,0
-c16,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,0,0,11.960929,11.960929,0,0.861623,0.418899,214.595737,0
-c17,0,0,0,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,234.489512,0
-c18,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,308.773221,0
-c19,0,0,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,308.773221,0
-c20,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,11.960929,0,0.861623,0.418899,290.445515,0
-c21,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,0,0.861623,0,265.232003,0
-c22,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,0,0.861623,0,265.232003,0
-c23,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,0,11.960929,0,0.418899,289.583892,0
-c24,0,0,0,0,23.915213,43.122319,0,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,186.417396,0
-c25,0,0,0,60.905800,23.915213,0,43.122319,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,247.323196,0
-c26,0,0,0,60.905800,23.915213,0,0,0,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,216.161806,0
-c27,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0,265.232003,0
-c28,0,0,0,0,0,0,0,18.327706,0,47.088742,11.960929,11.960929,0,0.861623,0.418899,90.618828,0
-c29,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,265.650902,0
-c30,0,0,0,60.905800,23.915213,0,0,18.327706,0,0,11.960929,11.960929,0,0.861623,0.418899,128.351099,0
-c31,0,0,0,0,0,0,0,18.327706,0,47.088742,11.960929,11.960929,0,0.861623,0.418899,90.618828,0
-c32,0,0,0,60.905800,0,0,43.122319,18.327706,0,47.088742,0,11.960929,11.960929,0.861623,0.418899,194.646947,0
-c33,0,0,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0,265.232003,0
-c34,0,0,0,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,0,0,0.861623,0.418899,223.945563,0
-c35,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,0,0,11.960929,11.960929,0.861623,0,242.937874,0
-c36,0,0,0,60.905800,23.915213,43.122319,43.122319,0,0,47.088742,11.960929,0,0,0.861623,0.418899,231.395844,0
-c37,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,0,0,0,0.418899,277.622963,0
-c38,0,0,0,60.905800,23.915213,0,0,0,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,157.112135,0
-c39,0,0,0,0,0,0,43.122319,18.327706,47.088742,47.088742,0,0,0,0.861623,0.418899,156.908031,0
-c40,0,0,0,60.905800,23.915213,0,0,0,47.088742,47.088742,0,0,11.960929,0.861623,0.418899,192.239948,0
-c41,0,0,0,60.905800,23.915213,43.122319,0,0,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,200.234454,0
-c42,0,0,0,0,0,0,0,18.327706,0,0,0,11.960929,0,0.861623,0.418899,31.569157,0
-c43,0,0,0,60.905800,23.915213,43.122319,43.122319,0,47.088742,0,0,0,11.960929,0.861623,0,230.976945,0
-c44,0,0,0,60.905800,23.915213,43.122319,0,0,47.088742,0,0,0,0,0.861623,0.418899,176.312596,0
-c45,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,0,0,0.418899,114.545173,0
-c46,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,0,0,0.418899,114.545173,0
-c47,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,0,0.861623,0.418899,115.406796,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,110.188861,0
-c2,0,32.936441,0,21.882908,9.597736,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0,130.345432,0
-c3,0,32.936441,0,0,9.597736,18.163317,0,8.429972,19.033555,0,0,4.848960,0,0.281905,0,93.291886,0
-c4,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,124.373456,0
-c5,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,111.059099,0
-c6,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,4.848960,4.848960,0,0.089271,120.510539,0
-c7,0,32.936441,0,0,9.597736,0,18.163317,8.429972,0,19.033555,0,0,0,0,0.089271,88.250292,0
-c8,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,0,4.848960,4.848960,0,0.281905,0.089271,105.339901,0
-c9,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,4.848960,0,0,0,124.00228,0
-c10,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,124.373456,0
-c11,0,0,0,0,0,18.163317,0,8.429972,19.033555,0,4.848960,4.848960,0,0.281905,0,55.606669,0
-c12,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0.281905,0.089271,105.339901,0
-c13,0,0,0,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0.089271,92.613877,0
-c14,0,0,0,21.882908,9.597736,18.163317,0,8.429972,0,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,92.025544,0
-c15,0,0,0,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,92.895782,0
-c16,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,0,0,4.848960,4.848960,0,0.281905,0.089271,86.306346,0
-c17,0,0,0,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,92.895782,0
-c18,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,124.373456,0
-c19,0,0,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,124.373456,0
-c20,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,4.848960,0,0.281905,0.089271,115.943484,0
-c21,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,0,0.281905,0,106.120868,0
-c22,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,0,0.281905,0,106.120868,0
-c23,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,0,4.848960,0,0.089271,115.661579,0
-c24,0,0,0,0,9.597736,18.163317,0,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,75.897259,0
-c25,0,0,0,21.882908,9.597736,0,18.163317,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,97.780167,0
-c26,0,0,0,21.882908,9.597736,0,0,0,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,84.46581,0
-c27,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0,106.120868,0
-c28,0,0,0,0,0,0,0,8.429972,0,19.033555,4.848960,4.848960,0,0.281905,0.089271,37.532623,0
-c29,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,106.210139,0
-c30,0,0,0,21.882908,9.597736,0,0,8.429972,0,0,4.848960,4.848960,0,0.281905,0.089271,49.979712,0
-c31,0,0,0,0,0,0,0,8.429972,0,19.033555,4.848960,4.848960,0,0.281905,0.089271,37.532623,0
-c32,0,0,0,21.882908,0,0,18.163317,8.429972,0,19.033555,0,4.848960,4.848960,0.281905,0.089271,77.578848,0
-c33,0,0,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0,106.120868,0
-c34,0,0,0,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,0,0,0.281905,0.089271,92.792628,0
-c35,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,0,0,4.848960,4.848960,0.281905,0,96.820658,0
-c36,0,0,0,21.882908,9.597736,18.163317,18.163317,0,0,19.033555,4.848960,0,0,0.281905,0.089271,92.060969,0
-c37,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,0,0,0,0.089271,110.812619,0
-c38,0,0,0,21.882908,9.597736,0,0,0,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,60.583295,0
-c39,0,0,0,0,0,0,18.163317,8.429972,19.033555,19.033555,0,0,0,0.281905,0.089271,65.031575,0
-c40,0,0,0,21.882908,9.597736,0,0,0,19.033555,19.033555,0,0,4.848960,0.281905,0.089271,74.76789,0
-c41,0,0,0,21.882908,9.597736,18.163317,0,0,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,78.746612,0
-c42,0,0,0,0,0,0,0,8.429972,0,0,0,4.848960,0,0.281905,0.089271,13.650108,0
-c43,0,0,0,21.882908,9.597736,18.163317,18.163317,0,19.033555,0,0,0,4.848960,0.281905,0,91.971698,0
-c44,0,0,0,21.882908,9.597736,18.163317,0,0,19.033555,0,0,0,0,0.281905,0.089271,69.048692,0
-c45,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,0,0,0.089271,46.883879,0
-c46,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,0,0,0.089271,46.883879,0
-c47,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,0,0.281905,0.089271,47.165784,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c2,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c3,0,0,0,0,0,0,0,0,0,0,12.8278,0,0,0,0,12.8278,0
-c4,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c5,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c6,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c7,16.0868,0,0,0,0,0,0,0,0,0,0,0,12.0417,0,0,28.1285,0
-c8,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c9,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c10,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c11,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c12,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c13,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c14,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c15,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c16,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c17,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c18,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c19,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c20,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c21,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c22,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c23,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c24,16.0868,0,0,0,0,0,0,59.0058,0,0,0,0,0,0,0,75.0926,0
-c25,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c26,16.0868,0,0,0,0,0,124.457,0,0,0,0,0,0,0,0,140.5438,0
-c27,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c28,16.0868,0,203.92,253.818,0,0,124.457,0,0,0,0,0,0,0,0,598.2818,0
-c29,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c30,16.0868,436.945,124.01,0,0,0,124.457,0,0,0,0,0,0,0,0,701.4988,0
-c31,16.0868,0,203.92,253.818,0,0,124.457,0,0,0,0,0,0,0,0,598.2818,0
-c32,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c33,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c34,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,12.0417,0,0,589.0835,0
-c35,16.0868,436.945,0,0,0,0,0,0,0,0,0,0,0,0,0,453.0318,0
-c36,16.0868,0,203.92,0,0,0,0,0,63.9065,0,0,0,0,0,0,283.9133,0
-c37,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,1.37103,0,17.45783,0
-c38,16.0868,0,0,0,0,0,152.501,59.0058,0,0,0,0,0,0,0,227.5936,0
-c39,0,0,0,0,70.9043,122.714,0,0,0,0,0,0,12.0417,0,0,205.66,0
-c40,16.0868,0,0,0,0,0,152.501,59.0058,0,0,0,34.3,0,0,0,261.8936,0
-c41,16.0868,436.945,124.01,0,0,0,0,49.5859,0,0,0,0,0,0,0,626.6277,0
-c42,16.0868,0,203.92,253.818,0,0,0,0,0,65.7062,0,0,0,0,0,539.531,0
-c43,16.0868,0,0,0,0,0,0,0,0,0,0,12.2454,0,0,0,28.3322,0
-c44,16.0868,436.945,124.01,0,0,0,0,49.5859,0,0,0,34.3,12.0417,0,0,672.9694,0
-c45,16.0868,436.945,124.01,236.022,0,0,0,49.5859,0,0,0,34.3,12.0417,0,0,908.9914,0
-c46,16.0868,436.945,124.01,0,0,0,0,49.5859,0,0,0,34.3,12.0417,0,0,672.9694,0
-c47,0,397.898,0,236.022,0,0,0,49.5859,0,0,0,34.3,12.0417,0,0,729.8476,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c2,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c3,0,0,0,0,0,0,0,0,0,0,2.77964,0,0,0,0,2.77964,0
-c4,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c5,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c6,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c7,7.62006,0,0,0,0,0,0,0,0,0,0,0,2.76826,0,0,10.38832,0
-c8,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c9,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c10,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c11,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c12,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c13,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c14,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c15,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c16,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c17,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c18,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c19,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c20,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c21,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c22,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c23,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c24,7.62006,0,0,0,0,0,0,10.5597,0,0,0,0,0,0,0,18.17976,0
-c25,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c26,7.62006,0,0,0,0,0,20.8436,0,0,0,0,0,0,0,0,28.46366,0
-c27,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c28,7.62006,0,41.6322,52.8973,0,0,20.8436,0,0,0,0,0,0,0,0,122.99316,0
-c29,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c30,7.62006,83.3307,28.5399,0,0,0,20.8436,0,0,0,0,0,0,0,0,140.33426,0
-c31,7.62006,0,41.6322,52.8973,0,0,20.8436,0,0,0,0,0,0,0,0,122.99316,0
-c32,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c33,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c34,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,2.76826,0,0,122.25892,0
-c35,7.62006,83.3307,0,0,0,0,0,0,0,0,0,0,0,0,0,90.95076,0
-c36,7.62006,0,41.6322,0,0,0,0,0,10.5851,0,0,0,0,0,0,59.83736,0
-c37,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0.810781,0,8.430841,0
-c38,7.62006,0,0,0,0,0,28.2053,10.5597,0,0,0,0,0,0,0,46.38506,0
-c39,0,0,0,0,14.8008,20.8654,0,0,0,0,0,0,2.76826,0,0,38.43446,0
-c40,7.62006,0,0,0,0,0,28.2053,10.5597,0,0,0,6.35668,0,0,0,52.74174,0
-c41,7.62006,83.3307,28.5399,0,0,0,0,9.68469,0,0,0,0,0,0,0,129.17535,0
-c42,7.62006,0,41.6322,52.8973,0,0,0,0,0,10.5438,0,0,0,0,0,112.69336,0
-c43,7.62006,0,0,0,0,0,0,0,0,0,0,2.76929,0,0,0,10.38935,0
-c44,7.62006,83.3307,28.5399,0,0,0,0,9.68469,0,0,0,6.35668,2.76826,0,0,138.30029,0
-c45,7.62006,83.3307,28.5399,41.5238,0,0,0,9.68469,0,0,0,6.35668,2.76826,0,0,179.82409,0
-c46,7.62006,83.3307,28.5399,0,0,0,0,9.68469,0,0,0,6.35668,2.76826,0,0,138.30029,0
-c47,0,109.635,0,41.5238,0,0,0,9.68469,0,0,0,6.35668,2.76826,0,0,169.96843,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,3604.139105,2.63708520048687
-c2,450.60246,123.468921,426.5549,104.457756,51.115816,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,12.419968,1.04789,2616.04256,3.63312965163955
-c3,352.4661,123.468921,701.7255,1159.5933,51.115816,100.738165,971.2723,49.948052,102.069715,543.5573,346.89143,26.069312,351.94343,1.204529,1.669613,4883.733483,1.94613854266833
-c4,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,1.204529,0.107291,3359.143748,2.82941802675668
-c5,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4049.198635,2.34723528777354
-c6,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,26.069312,26.069312,26.069312,12.419968,0.107291,3430.616457,2.77047055538944
-c7,450.60246,123.468921,426.5549,652.9992,51.115816,544.1687,100.738165,49.948052,857.91333,102.069715,208.37216,206.54856,354.71169,12.419968,0.107291,4141.738928,2.2947902061781
-c8,352.4661,1720.485,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,883.36663,26.069312,26.069312,351.94343,1.204529,0.107291,4297.334173,2.21170184914773
-c9,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,3370.924593,2.81952966132311
-c10,352.4661,1550.099,455.0948,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,3366.470208,2.82326035533832
-c11,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,971.2723,49.948052,102.069715,883.36663,26.069312,26.069312,210.33129,1.204529,1.669613,5898.643878,1.61128933928685
-c12,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,857.91333,102.069715,26.069312,26.069312,351.94343,1.204529,0.107291,4260.382233,2.23088479275224
-c13,450.60246,1610.85,426.5549,104.457756,51.115816,544.1687,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,12.419968,0.107291,4122.874209,2.30529030129596
-c14,352.4661,1720.485,426.5549,104.457756,51.115816,100.738165,971.2723,49.948052,857.91333,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4816.54089,1.97328792129723
-c15,352.4661,1720.485,426.5549,104.457756,51.115816,925.6073,971.2723,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4885.56641,1.94540840628122
-c16,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,857.91333,883.36663,26.069312,26.069312,351.94343,1.204529,0.107291,5041.679148,1.88516993852204
-c17,352.4661,1720.485,426.5549,104.457756,51.115816,544.1687,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4123.15741,2.30513196135454
-c18,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,3360.967348,2.82788283583519
-c19,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,26.069312,1.204529,0.107291,3496.706978,2.71810647731929
-c20,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,490.07014,102.069715,102.069715,26.069312,26.069312,210.33129,1.204529,0.107291,3803.048566,2.49915869943275
-c21,352.4661,1720.485,426.5549,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,210.33129,1.204529,1.669613,3865.551175,2.45874947241516
-c22,352.4661,1550.099,455.0948,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,210.33129,1.204529,1.669613,3723.705075,2.55240995495837
-c23,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,490.07014,102.069715,102.069715,26.069312,344.22248,26.069312,12.419968,0.107291,3948.155195,2.40730707073162
-c24,450.60246,1610.85,426.5549,652.9992,51.115816,100.738165,590.3019,500.62984,102.069715,102.069715,26.069312,206.54856,26.069312,1.204529,0.107291,4847.930715,1.96051109652645
-c25,450.60246,1610.85,426.5549,104.457756,51.115816,544.1687,100.738165,290.68457,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,4180.984721,2.27324962058263
-c26,450.60246,1610.85,426.5549,104.457756,51.115816,544.1687,992.1159,490.07014,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4953.594858,1.91869182655931
-c27,352.4661,1720.485,426.5549,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,1.669613,4380.412765,2.16975487263802
-c28,450.60246,1610.85,743.3577,705.8965,333.7788,544.1687,992.1159,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,1.204529,0.107291,6324.113881,1.50288596735524
-c29,352.4661,1720.485,426.5549,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,1.204529,0.107291,4241.176523,2.24098711344802
-c30,450.60246,1633.4297,455.0948,104.457756,51.115816,544.1687,992.1159,49.948052,527.54432,543.5573,26.069312,26.069312,351.94343,1.204529,0.107291,5757.428678,1.65081020095599
-c31,450.60246,1610.85,743.3577,705.8965,333.7788,544.1687,992.1159,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,1.204529,0.107291,6324.113881,1.50288596735524
-c32,352.4661,1720.485,426.5549,104.457756,558.563,925.6073,100.738165,49.948052,857.91333,102.069715,344.11179,26.069312,26.069312,1.204529,0.107291,5596.365552,1.6983204366933
-c33,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,344.11179,26.069312,26.069312,1.204529,1.669613,3987.833035,2.3833550292219
-c34,450.60246,1633.4297,455.0948,652.9992,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,208.37216,206.54856,354.71169,1.204529,0.107291,4469.750018,2.1263878085097
-c35,450.60246,1633.4297,701.7255,104.457756,51.115816,100.738165,100.738165,490.07014,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,1.669613,4541.889643,2.09261402099166
-c36,450.60246,1610.85,743.3577,104.457756,51.115816,100.738165,100.738165,290.68457,868.49843,102.069715,26.069312,344.22248,351.94343,1.204529,0.107291,5146.659819,1.84671657105463
-c37,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,490.07014,102.069715,102.069715,26.069312,206.54856,210.33129,13.230749,0.107291,3995.554034,2.37874943981425
-c38,450.60246,1610.85,426.5549,104.457756,51.115816,925.6073,618.5072,500.62984,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,5595.774891,1.69849970259464
-c39,352.4661,1550.099,701.7255,1159.5933,348.5796,946.4727,100.738165,49.948052,102.069715,102.069715,208.37216,206.54856,354.71169,1.204529,0.107291,6184.706077,1.53676211706637
-c40,450.60246,1610.85,426.5549,104.457756,51.115816,925.6073,618.5072,500.62984,102.069715,102.069715,344.11179,212.90524,26.069312,1.204529,0.107291,5476.862864,1.73537702521929
-c41,450.60246,1633.4297,455.0948,104.457756,51.115816,100.738165,971.2723,300.36926,102.069715,883.36663,344.11179,26.069312,26.069312,1.204529,0.107291,5450.078836,1.74390541304263
-c42,450.60246,1610.85,743.3577,705.8965,333.7788,544.1687,590.3019,49.948052,527.54432,893.91043,344.11179,26.069312,210.33129,1.204529,0.107291,7032.183074,1.3515606637126
-c43,450.60246,1610.85,426.5549,104.457756,51.115816,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,4365.676231,2.17707897640293
-c44,450.60246,1633.4297,455.0948,104.457756,51.115816,100.738165,971.2723,300.36926,102.069715,883.36663,344.11179,212.90524,354.71169,1.204529,0.107291,5965.557142,1.5932161527317
-c45,450.60246,1633.4297,455.0948,1201.1171,51.115816,100.738165,971.2723,300.36926,102.069715,883.36663,344.11179,212.90524,354.71169,12.419968,0.107291,7073.431925,1.34367901245223
-c46,450.60246,1633.4297,455.0948,652.9992,51.115816,100.738165,971.2723,300.36926,102.069715,883.36663,344.11179,212.90524,354.71169,12.419968,0.107291,6525.314025,1.45654630197592
-c47,352.4661,1720.485,426.5549,1201.1171,51.115816,100.738165,971.2723,300.36926,102.069715,883.36663,344.11179,212.90524,354.71169,1.204529,0.107291,7022.595526,1.35340587215523
-c2,2616.04256
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results2.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results2.csv
deleted file mode 100644
index 3371feb04844bd486683dbd9d19495a0aa255099..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_promise_results2.csv
+++ /dev/null
@@ -1,616 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1129.478,174.570537,1949.97,118.016247,38.840247,77.680494,118.016247,59.008124,157.113483,157.113483,29.645681,19.513340,19.513340,69.37799,0.010789,4117.868002,14.2098497070803
-c2,1129.478,86.311660,43.155830,118.016247,38.840247,6007.756,118.016247,59.008124,157.113483,157.113483,29.645681,19.513340,19.513340,0.839227,0.010789,7984.331698,7.32863918025767
-c3,1129.478,174.570537,1949.97,3356.913,38.840247,77.680494,3366.9575,59.008124,157.113483,157.113483,29.645681,19.513340,39.466907,0.839227,0.010789,10557.120812,5.54263679754661
-c4,1129.478,6358.145,43.155830,118.016247,38.840247,77.680494,6530.41,59.008124,157.113483,157.113483,29.645681,19.513340,2353.8847,0.839227,0.010789,17072.854645,3.42732880452443
-c5,1108.984,174.570537,43.155830,118.016247,38.840247,77.680494,6530.41,59.008124,157.113483,118.016247,29.645681,19.513340,19.513340,0.839227,0.010789,8495.317586,6.88782797839681
-c6,1129.478,6358.145,43.155830,118.016247,78.556742,77.680494,118.016247,59.008124,157.113483,157.113483,2479.9481,19.513340,19.513340,0.839227,0.010789,10816.108446,5.40992045282679
-c7,1108.984,174.570537,87.285268,77.680494,59.008124,157.113483,118.016247,78.556742,6053.9348,157.113483,19.513340,39.466907,19.513340,0.839227,10.00372,8161.599712,7.16946287380649
-c8,1129.478,174.570537,43.155830,118.016247,38.840247,77.680494,3366.9575,59.008124,157.113483,157.113483,29.645681,19.513340,19.513340,69.37799,0.010789,5459.995085,10.7169118031008
-c9,1129.478,86.311660,1949.97,157.113483,3475.891,3053.2545,157.113483,78.556742,77.680494,118.016247,29.645681,39.466907,29.645681,0.839227,0.016391,10382.999496,5.63558596935151
-c10,1129.478,6358.145,65.564582,118.016247,59.008124,157.113483,118.016247,59.008124,157.113483,77.680494,2479.9481,19.513340,2353.8847,69.37799,0.016391,13221.884305,4.42556333504717
-c11,1129.478,7471.505,87.285268,157.113483,1731.6018,118.016247,6530.41,3324.8076,157.113483,77.680494,29.645681,19.513340,19.513340,1.117251,1.812953,20856.61394,2.80555063879391
-c12,1129.478,174.570537,87.285268,118.016247,38.840247,77.680494,118.016247,78.556742,157.113483,157.113483,1299.926,19.513340,19.513340,0.839227,0.010789,3476.473444,16.8315064396187
-c13,1129.478,174.570537,43.155830,118.016247,38.840247,77.680494,157.113483,1683.0934,157.113483,157.113483,29.645681,2376.5679,19.513340,0.839227,1.812953,6164.554305,9.49205457502974
-c14,1129.478,6358.145,3792.343,6973.454,59.008124,118.016247,157.113483,59.008124,157.113483,118.016247,19.513340,19.513340,19.513340,0.839227,0.021821,18981.096776,3.08276635566759
-c15,1108.984,174.570537,65.564582,118.016247,38.840247,77.680494,118.016247,78.556742,118.016247,77.680494,19.513340,2376.5679,1286.6509,0.552395,0.016391,5659.226763,10.3396255812903
-c16,1129.478,131.129164,65.564582,118.016247,38.840247,3053.2545,157.113483,1683.0934,3203.344,157.113483,19.513340,2376.5679,19.513340,69.37799,0.016391,12221.936067,4.7876446123972
-c17,1129.478,131.129164,43.155830,118.016247,38.840247,6007.756,3366.9575,59.008124,157.113483,157.113483,29.645681,19.513340,2353.8847,0.552395,0.010789,13612.174983,4.2986728047656
-c18,1129.478,174.570537,43.155830,118.016247,38.840247,77.680494,118.016247,3324.8076,157.113483,77.680494,29.645681,2376.5679,19.513340,0.839227,0.016391,7685.941718,7.61315766220909
-c19,1108.984,174.570537,43.155830,157.113483,38.840247,77.680494,118.016247,59.008124,157.113483,118.016247,2479.9481,19.513340,2353.8847,0.839227,0.010789,6906.694848,8.47211108693084
-c20,1108.984,131.129164,43.155830,157.113483,3475.891,6007.756,77.680494,78.556742,3203.344,77.680494,39.466907,19.513340,19.513340,0.839227,0.021821,14440.645842,4.05205467110122
-c21,1129.478,174.570537,43.155830,118.016247,38.840247,77.680494,118.016247,59.008124,157.113483,77.680494,29.645681,2376.5679,2353.8847,0.839227,0.010789,6754.508,8.6629975087305
-c22,1108.984,131.129164,1949.97,3356.913,38.840247,157.113483,77.680494,78.556742,157.113483,77.680494,2479.9481,29.645681,2353.8847,0.552395,0.021821,11998.033804,4.87698962273244
-c23,1108.984,174.570537,65.564582,118.016247,38.840247,77.680494,118.016247,78.556742,118.016247,3427.0759,19.513340,2376.5679,1286.6509,0.552395,0.016391,9008.622169,6.49536467350355
-c24,1108.984,86.311660,43.155830,3356.913,38.840247,3053.2545,157.113483,38.840247,6053.9348,77.680494,2479.9481,39.466907,1286.6509,57.16078,0.010789,17878.265737,3.27292855898259
-c25,1129.478,174.570537,43.155830,118.016247,38.840247,118.016247,6530.41,3324.8076,157.113483,157.113483,29.645681,2376.5679,29.645681,0.839227,0.010789,14228.230952,4.11254825910175
-c26,1129.478,131.129164,43.155830,118.016247,38.840247,3053.2545,6530.41,78.556742,157.113483,157.113483,2479.9481,19.513340,19.513340,0.839227,0.010789,13956.892492,4.1925010497351
-c27,1129.478,131.129164,65.564582,118.016247,78.556742,3053.2545,3366.9575,3324.8076,157.113483,157.113483,1299.926,39.466907,29.645681,0.552395,0.010789,12951.593073,4.51792193141025
-c28,1129.478,131.129164,65.564582,157.113483,78.556742,3053.2545,3366.9575,3324.8076,118.016247,157.113483,1299.926,39.466907,29.645681,0.552395,0.010789,12951.593073,4.51792193141025
-c29,1129.478,174.570537,65.564582,118.016247,38.840247,157.113483,118.016247,78.556742,118.016247,157.113483,1299.926,19.513340,2353.8847,0.839227,1.812953,5831.262035,10.0345835066802
-c30,1129.478,174.570537,87.285268,157.113483,59.008124,6007.756,6530.41,78.556742,6053.9348,157.113483,29.645681,2376.5679,29.645681,0.839227,0.016391,22871.941317,2.55834368303812
-c31,1129.478,174.570537,65.564582,157.113483,38.840247,157.113483,6530.41,38.840247,118.016247,157.113483,1299.926,39.466907,39.466907,57.16078,0.010789,10003.091692,5.84962010343612
-c32,1108.984,131.129164,43.155830,157.113483,78.556742,118.016247,77.680494,38.840247,118.016247,3427.0759,29.645681,1273.3007,1286.6509,0.552395,0.016391,7888.734421,7.41744910888224
-c33,1129.478,131.129164,43.155830,77.680494,1731.6018,77.680494,3366.9575,78.556742,6053.9348,118.016247,2479.9481,2376.5679,19.513340,1.117251,0.016391,17685.354053,3.30862963426006
-c34,1129.478,174.570537,43.155830,118.016247,38.840247,77.680494,118.016247,59.008124,157.113483,3427.0759,29.645681,2376.5679,1286.6509,0.839227,0.016391,9036.675208,6.47520076229788
-c35,1129.478,174.570537,43.155830,157.113483,78.556742,118.016247,77.680494,59.008124,118.016247,3427.0759,2479.9481,19.513340,39.466907,0.552395,0.016391,7922.168737,7.38614488619741
-c36,1129.478,174.570537,65.564582,157.113483,38.840247,157.113483,77.680494,1683.0934,77.680494,118.016247,1299.926,1273.3007,19.513340,0.839227,0.021821,6272.752055,9.32832756613193
-c37,1108.984,6358.145,43.155830,3356.913,3475.891,3053.2545,157.113483,38.840247,6053.9348,157.113483,1299.926,39.466907,29.645681,57.16078,0.010789,25229.5555,2.3192753677754
-c38,1129.478,174.570537,65.564582,157.113483,38.840247,157.113483,77.680494,1683.0934,77.680494,118.016247,1299.926,1273.3007,19.513340,57.16078,0.021821,6329.073608,9.24531606718965
-c39,1108.984,131.129164,3792.343,77.680494,59.008124,6007.756,157.113483,1683.0934,118.016247,77.680494,39.466907,39.466907,19.513340,1.117251,0.021821,13312.390632,4.39547546500005
-c40,1129.478,131.129164,65.564582,118.016247,1731.6018,6007.756,3366.9575,59.008124,118.016247,3427.0759,39.466907,29.645681,29.645681,0.839227,0.010789,16254.211849,3.59994609560877
-c41,1129.478,174.570537,3792.343,118.016247,38.840247,3053.2545,6530.41,59.008124,157.113483,77.680494,29.645681,19.513340,19.513340,0.839227,0.010789,15200.237009,3.84956408399405
-c42,1129.478,86.311660,1949.97,118.016247,38.840247,77.680494,118.016247,59.008124,6053.9348,157.113483,2479.9481,19.513340,19.513340,0.839227,0.010789,12308.194098,4.75409194083956
-c43,1129.478,131.129164,3792.343,118.016247,38.840247,3053.2545,118.016247,59.008124,157.113483,157.113483,39.466907,1273.3007,19.513340,0.839227,10.00372,10097.436389,5.79496458400552
-c44,1129.478,131.129164,43.155830,6973.454,3475.891,118.016247,157.113483,78.556742,157.113483,6493.6098,1299.926,19.513340,1286.6509,0.839227,10.00372,21374.450936,2.73758080356998
-c45,1129.478,174.570537,1949.97,118.016247,38.840247,77.680494,3366.9575,59.008124,157.113483,157.113483,29.645681,1273.3007,19.513340,0.839227,0.010789,8552.057852,6.84212936481748
-c46,1129.478,131.129164,1949.97,118.016247,59.008124,118.016247,157.113483,78.556742,118.016247,77.680494,2479.9481,19.513340,2353.8847,1.117251,10.00372,8801.451859,6.64825384670375
-c47,1108.984,174.570537,1949.97,118.016247,38.840247,77.680494,6530.41,59.008124,77.680494,6493.6098,39.466907,29.645681,1286.6509,0.839227,1.812953,17987.185611,3.2531096183221
-c48,1129.478,174.570537,3792.343,157.113483,38.840247,77.680494,6530.41,78.556742,157.113483,157.113483,29.645681,1273.3007,19.513340,57.16078,10.00372,13682.84369,4.27647115914345
-c49,1129.478,131.129164,1949.97,157.113483,3475.891,3053.2545,157.113483,3324.8076,6053.9348,118.016247,29.645681,39.466907,1286.6509,0.839227,0.010789,20907.321781,2.79874616060588
-c50,1108.984,131.129164,3792.343,157.113483,3475.891,3053.2545,77.680494,78.556742,3203.344,6493.6098,1299.926,19.513340,19.513340,0.839227,10.00372,22921.70181,2.55278980037133
-c51,1129.478,131.129164,43.155830,3356.913,1731.6018,3053.2545,77.680494,1683.0934,118.016247,157.113483,1299.926,39.466907,29.645681,0.552395,1.812953,12852.839854,4.55263483031153
-c12,3476.473444
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,442.9824,90.532480,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,12.419968,0.018020,1533.001848,6.19987611261665
-c2,442.9824,90.532480,45.439232,82.574848,41.518080,925.6073,82.574848,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,1983.421288,4.79193287694848
-c3,442.9824,90.532480,426.5549,652.9992,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,2599.655908,3.6560306935809
-c4,442.9824,1610.85,45.439232,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,21.220352,351.94343,0.922624,0.018020,3880.126886,2.44951317116506
-c5,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,1938.569988,4.90280037685179
-c6,442.9824,1610.85,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,21.220352,0.922624,0.018020,2983.597794,3.18555733569639
-c7,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,857.91333,83.036160,21.220352,21.220352,21.220352,0.922624,1.669613,1826.401299,5.20390652525995
-c8,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,12.419968,0.018020,1659.613232,5.72688949572743
-c9,442.9824,90.532480,426.5549,82.574848,558.563,544.1687,82.574848,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,2500.143276,3.80155084273855
-c10,442.9824,1610.85,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,351.94343,12.419968,0.018020,3325.818216,2.85776950360628
-c11,442.9824,1550.099,45.439232,82.574848,333.7788,82.574848,971.2723,490.07014,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,1.04789,4230.495458,2.24664510993915
-c12,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,1327.540644,7.15942030476769
-c13,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,1.04789,1713.587324,5.54650555021814
-c14,442.9824,1610.85,701.7255,1159.5933,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,4394.011076,2.16304005094774
-c15,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,344.22248,210.33129,0.922624,0.018020,1561.985602,6.08483300828576
-c16,442.9824,90.532480,45.439232,82.574848,41.518080,544.1687,82.574848,290.68457,527.54432,83.036160,21.220352,344.22248,21.220352,12.419968,0.018020,2630.15681,3.61363313415396
-c17,442.9824,90.532480,45.439232,82.574848,41.518080,925.6073,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,351.94343,0.922624,0.018020,2821.871418,3.36812717991365
-c18,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,490.07014,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,1911.943024,4.97107996503352
-c19,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,351.94343,0.922624,0.018020,1703.487052,5.57939174759329
-c20,352.4661,90.532480,45.439232,82.574848,558.563,925.6073,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,2854.458068,3.32967645648118
-c21,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,344.22248,351.94343,0.922624,0.018020,1794.114042,5.29755712610619
-c22,352.4661,90.532480,426.5549,652.9992,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,351.94343,0.922624,0.018020,2655.027072,3.57978338535814
-c23,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,543.5573,21.220352,344.22248,210.33129,0.922624,0.018020,2022.506742,4.69932756746639
-c24,352.4661,90.532480,45.439232,652.9992,41.518080,544.1687,82.574848,41.518080,857.91333,83.036160,344.11179,21.220352,210.33129,7.640322,0.018020,3375.487984,2.81571788182322
-c25,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,971.2723,490.07014,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,2800.640476,3.39366009314006
-c26,442.9824,90.532480,45.439232,82.574848,41.518080,544.1687,971.2723,41.518080,83.036160,83.036160,344.11179,21.220352,21.220352,0.922624,0.018020,2813.571578,3.37806291992395
-c27,442.9824,90.532480,45.439232,82.574848,41.518080,544.1687,590.3019,490.07014,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,2745.413608,3.46192711513918
-c28,442.9824,90.532480,45.439232,82.574848,41.518080,544.1687,590.3019,490.07014,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,2745.413608,3.46192711513918
-c29,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,208.37216,21.220352,351.94343,0.922624,1.04789,1659.293592,5.72799270184895
-c30,442.9824,90.532480,45.439232,82.574848,41.518080,925.6073,971.2723,41.518080,857.91333,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,3969.998038,2.39406211983468
-c31,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,0.018020,2222.955794,4.27557837906432
-c32,352.4661,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,543.5573,21.220352,206.54856,210.33129,0.922624,0.018020,1884.832822,5.04258072270662
-c33,442.9824,90.532480,45.439232,82.574848,333.7788,82.574848,590.3019,41.518080,857.91333,83.036160,344.11179,344.22248,21.220352,0.922624,0.018020,3361.147344,2.82773139719484
-c34,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,543.5573,21.220352,344.22248,210.33129,0.922624,0.018020,2113.023042,4.49802085414169
-c35,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,543.5573,344.11179,21.220352,21.220352,0.922624,0.018020,1923.801414,4.94043802795344
-c36,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,0.922624,0.018020,1762.035342,5.39400169341202
-c37,352.4661,1610.85,45.439232,652.9992,558.563,544.1687,82.574848,41.518080,857.91333,83.036160,208.37216,21.220352,21.220352,7.640322,0.018020,5087.999856,1.86800751576107
-c38,442.9824,90.532480,45.439232,82.574848,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,0.018020,1768.75304,5.3735153555685
-c39,352.4661,90.532480,701.7255,82.574848,41.518080,925.6073,82.574848,290.68457,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,2798.357746,3.39642843447835
-c40,442.9824,90.532480,45.439232,82.574848,333.7788,925.6073,590.3019,41.518080,83.036160,543.5573,21.220352,21.220352,21.220352,0.922624,0.018020,3243.9302,2.92990948603304
-c41,442.9824,90.532480,701.7255,82.574848,41.518080,544.1687,971.2723,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,3146.966408,3.0201853543209
-c42,442.9824,90.532480,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,857.91333,83.036160,344.11179,21.220352,21.220352,0.922624,0.018020,2619.273112,3.62864863216873
-c43,442.9824,90.532480,701.7255,82.574848,41.518080,544.1687,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,0.922624,1.669613,2445.248757,3.88689361035452
-c44,442.9824,90.532480,45.439232,1159.5933,558.563,82.574848,82.574848,41.518080,83.036160,883.36663,208.37216,21.220352,210.33129,0.922624,1.669613,3912.697017,2.4291228975289
-c45,442.9824,90.532480,426.5549,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,0.922624,0.018020,2214.559764,4.29178831988441
-c46,442.9824,90.532480,426.5549,82.574848,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,344.11179,21.220352,351.94343,0.922624,1.669613,2176.770613,4.36629457629054
-c47,352.4661,90.532480,426.5549,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,883.36663,21.220352,21.220352,210.33129,0.922624,1.04789,3310.156934,2.87129041322696
-c48,442.9824,90.532480,701.7255,82.574848,41.518080,82.574848,971.2723,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2879.070055,3.30121242148057
-c49,442.9824,90.532480,426.5549,82.574848,558.563,544.1687,82.574848,490.07014,857.91333,83.036160,21.220352,21.220352,210.33129,0.922624,0.018020,3912.683444,2.42913132409463
-c50,352.4661,90.532480,701.7255,82.574848,558.563,544.1687,82.574848,41.518080,527.54432,883.36663,208.37216,21.220352,21.220352,0.922624,1.669613,4118.439607,2.30777256295524
-c51,442.9824,90.532480,45.439232,652.9992,333.7788,544.1687,82.574848,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,1.04789,2902.015928,3.2751101531821
-c12,1327.540644
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1145.5648,472.88204,1949.97,336.517905,139.46418,272.76268,313.098433,152.266559,357.330128,357.330128,80.061262,69.928921,69.928921,69.37799,0.532687,5787.016634,10.1113042406143
-c2,1145.5648,384.623163,174.989917,336.517905,139.46418,6007.756,313.098433,152.266559,357.330128,357.330128,80.061262,69.928921,69.928921,2.857198,0.532687,9592.250202,6.10016263136917
-c3,1145.5648,472.88204,1949.97,3356.913,139.46418,272.76268,3366.9575,152.266559,357.330128,357.330128,80.061262,69.928921,89.882488,2.857198,0.532687,11814.703571,4.95266648004278
-c4,1145.5648,6358.145,174.989917,336.517905,139.46418,272.76268,6530.41,152.266559,357.330128,357.330128,80.061262,69.928921,2353.8847,2.857198,0.532687,18332.046065,3.19191247481784
-c5,1108.984,472.88204,174.989917,336.517905,139.46418,272.76268,6530.41,152.266559,357.330128,318.232892,80.061262,69.928921,69.928921,2.857198,0.532687,10087.14929,5.80087441760392
-c6,1145.5648,6358.145,174.989917,336.517905,179.180675,272.76268,313.098433,152.266559,357.330128,357.330128,2479.9481,69.928921,69.928921,2.857198,0.532687,12270.382052,4.76874200967429
-c7,1108.984,472.88204,219.119355,296.182152,159.632057,352.195669,313.098433,171.815177,6053.9348,357.330128,69.928921,89.882488,69.928921,2.857198,10.00372,9747.775059,6.00283509709131
-c8,1145.5648,472.88204,174.989917,336.517905,139.46418,272.76268,3366.9575,152.266559,357.330128,357.330128,80.061262,69.928921,69.928921,69.37799,0.532687,7065.895618,8.28122706282488
-c9,1145.5648,384.623163,1949.97,375.615141,3475.891,3197.8545,352.195669,171.815177,277.897139,318.232892,80.061262,89.882488,80.061262,2.857198,0.538289,11903.05998,4.91590283924703
-c10,1145.5648,6358.145,197.398669,336.517905,159.632057,352.195669,313.098433,152.266559,357.330128,277.897139,2479.9481,69.928921,2353.8847,69.37799,0.538289,14623.724359,4.00132585970519
-c11,1145.5648,7908.45,219.119355,375.615141,1731.6018,313.098433,6530.41,3324.8076,357.330128,277.897139,80.061262,69.928921,69.928921,3.135222,1.812953,22408.761675,2.61122356650159
-c12,1145.5648,472.88204,219.119355,336.517905,139.46418,272.76268,313.098433,171.815177,357.330128,357.330128,1299.926,69.928921,69.928921,2.857198,0.532687,5229.058553,11.1902142863571
-c13,1145.5648,472.88204,174.989917,336.517905,139.46418,272.76268,352.195669,1683.0934,357.330128,357.330128,80.061262,2376.5679,69.928921,2.857198,1.812953,7823.359081,7.47943249047151
-c14,1145.5648,6358.145,3996.263,6973.454,159.632057,313.098433,352.195669,152.266559,357.330128,318.232892,69.928921,69.928921,69.928921,2.857198,0.543719,20339.370218,2.87689765848925
-c15,1108.984,472.88204,197.398669,336.517905,139.46418,272.76268,313.098433,171.815177,318.232892,277.897139,69.928921,2376.5679,1319.7884,2.570366,0.538289,7378.446991,7.93043388687762
-c16,1145.5648,429.440667,197.398669,336.517905,139.46418,3053.2545,352.195669,1683.0934,3203.344,357.330128,69.928921,2376.5679,69.928921,69.37799,0.538289,13483.945939,4.33955213657467
-c17,1145.5648,429.440667,174.989917,336.517905,139.46418,6007.756,3519.4585,152.266559,357.330128,357.330128,80.061262,69.928921,2353.8847,2.570366,0.532687,15127.09672,3.86817692378596
-c18,1145.5648,472.88204,174.989917,336.517905,139.46418,272.76268,313.098433,3324.8076,357.330128,277.897139,80.061262,2376.5679,69.928921,2.857198,0.538289,9345.268392,6.26138102860191
-c19,1108.984,472.88204,174.989917,375.615141,139.46418,272.76268,313.098433,152.266559,357.330128,318.232892,2479.9481,69.928921,2353.8847,2.857198,0.532687,8592.777576,6.80970566786953
-c20,1108.984,429.440667,174.989917,375.615141,3475.891,6007.756,272.76268,171.815177,3203.344,277.897139,89.882488,69.928921,69.928921,2.857198,0.543719,15731.636968,3.71952941642831
-c21,1145.5648,472.88204,174.989917,336.517905,139.46418,272.76268,313.098433,152.266559,357.330128,277.897139,80.061262,2376.5679,2353.8847,2.857198,0.532687,8456.677528,6.91929968445996
-c22,1108.984,429.440667,1949.97,3356.913,139.46418,352.195669,272.76268,171.815177,357.330128,277.897139,2479.9481,80.061262,2353.8847,2.570366,0.543719,13333.780787,4.38842420907408
-c23,1108.984,472.88204,197.398669,336.517905,139.46418,272.76268,313.098433,171.815177,318.232892,3427.0759,69.928921,2376.5679,1319.7884,2.570366,0.538289,10527.625752,5.5581655033726
-c24,1108.984,384.623163,174.989917,3356.913,139.46418,3053.2545,352.195669,132.098682,6053.9348,277.897139,2479.9481,89.882488,1286.6509,57.16078,0.532687,18948.530005,3.08806469519025
-c25,1145.5648,472.88204,174.989917,336.517905,139.46418,313.098433,6530.41,3324.8076,357.330128,357.330128,80.061262,2376.5679,80.061262,2.857198,0.532687,15692.47544,3.72881172851584
-c26,1145.5648,429.440667,174.989917,336.517905,139.46418,3053.2545,6654.867,171.815177,357.330128,357.330128,2479.9481,69.928921,69.928921,2.857198,0.532687,15443.770229,3.78886020683195
-c27,1145.5648,429.440667,197.398669,336.517905,179.180675,3053.2545,3366.9575,3383.8134,357.330128,357.330128,1299.926,89.882488,80.061262,2.570366,0.532687,14279.761175,4.09770763783304
-c28,1145.5648,429.440667,197.398669,375.615141,179.180675,3053.2545,3366.9575,3383.8134,318.232892,357.330128,1299.926,89.882488,80.061262,2.570366,0.532687,14279.761175,4.09770763783304
-c29,1145.5648,472.88204,197.398669,336.517905,139.46418,352.195669,313.098433,171.815177,318.232892,357.330128,1299.926,69.928921,2353.8847,2.857198,1.812953,7532.909665,7.76781996179932
-c30,1145.5648,472.88204,219.119355,375.615141,159.632057,6007.756,6530.41,171.815177,6053.9348,357.330128,80.061262,2376.5679,80.061262,2.857198,0.538289,24034.145409,2.43463146301949
-c31,1145.5648,472.88204,197.398669,375.615141,139.46418,352.195669,6530.41,132.098682,318.232892,357.330128,1299.926,89.882488,89.882488,57.16078,0.532687,11558.576644,5.06241279864967
-c32,1108.984,429.440667,174.989917,375.615141,179.180675,313.098433,272.76268,132.098682,318.232892,3427.0759,80.061262,1273.3007,1286.6509,2.570366,0.538289,9374.600504,6.24178984414897
-c33,1145.5648,429.440667,174.989917,296.182152,1731.6018,272.76268,3366.9575,171.815177,6053.9348,318.232892,2479.9481,2376.5679,69.928921,3.135222,0.538289,18891.600817,3.09737047167584
-c34,1145.5648,472.88204,174.989917,336.517905,139.46418,272.76268,313.098433,152.266559,357.330128,3427.0759,80.061262,2376.5679,1319.7884,2.857198,0.538289,10571.765591,5.53495873379171
-c35,1145.5648,472.88204,174.989917,375.615141,179.180675,313.098433,272.76268,152.266559,318.232892,3427.0759,2492.7759,69.928921,89.882488,2.570366,0.538289,9487.365001,6.16760145942232
-c36,1145.5648,472.88204,197.398669,375.615141,139.46418,352.195669,272.76268,1683.0934,277.897139,318.232892,1299.926,1273.3007,69.928921,2.857198,0.543719,7881.663148,7.42410389810148
-c37,1108.984,6756.043,174.989917,3356.913,3586.141,3197.8545,352.195669,132.098682,6053.9348,357.330128,1299.926,89.882488,80.061262,57.16078,0.532687,26604.047913,2.19945050521662
-c38,1145.5648,472.88204,197.398669,375.615141,139.46418,352.195669,272.76268,1683.0934,277.897139,318.232892,1299.926,1273.3007,69.928921,57.16078,0.543719,7935.96673,7.37330284972978
-c39,1108.984,429.440667,3792.343,296.182152,159.632057,6007.756,352.195669,1683.0934,318.232892,277.897139,89.882488,89.882488,69.928921,3.135222,0.543719,14679.129814,3.98622310626142
-c40,1145.5648,429.440667,197.398669,336.517905,1731.6018,6130.47,3519.4585,152.266559,318.232892,3427.0759,89.882488,80.061262,80.061262,2.857198,0.532687,17641.422589,3.31686893254281
-c41,1145.5648,472.88204,3792.343,336.517905,139.46418,3053.2545,6654.867,152.266559,357.330128,277.897139,80.061262,69.928921,69.928921,2.857198,0.532687,16605.69624,3.52374785404513
-c42,1145.5648,384.623163,1949.97,336.517905,139.46418,272.76268,313.098433,152.266559,6053.9348,357.330128,2479.9481,69.928921,69.928921,2.857198,0.532687,13728.728475,4.26217814150354
-c43,1145.5648,429.440667,3792.343,336.517905,139.46418,3053.2545,313.098433,152.266559,357.330128,357.330128,89.882488,1273.3007,69.928921,2.857198,10.00372,11522.583327,5.0782263555487
-c44,1145.5648,429.440667,174.989917,6973.454,3475.891,313.098433,352.195669,171.815177,357.330128,6493.6098,1337.2918,69.928921,1286.6509,2.857198,10.00372,22594.12213,2.58980128758027
-c45,1145.5648,472.88204,1949.97,336.517905,139.46418,272.76268,3366.9575,152.266559,357.330128,357.330128,80.061262,1273.3007,69.928921,2.857198,0.532687,9977.726688,5.86449078896146
-c46,1145.5648,429.440667,1949.97,336.517905,159.632057,313.098433,352.195669,171.815177,318.232892,277.897139,2479.9481,69.928921,2353.8847,3.135222,10.00372,10371.265402,5.64196209534083
-c47,1108.984,472.88204,1949.97,336.517905,139.46418,272.76268,6530.41,152.266559,277.897139,6493.6098,89.882488,80.061262,1286.6509,2.857198,1.812953,19196.029104,3.04824952187544
-c48,1145.5648,472.88204,3792.343,375.615141,139.46418,272.76268,6530.41,171.815177,357.330128,357.330128,80.061262,1273.3007,69.928921,57.16078,10.00372,15105.972657,3.87358614928555
-c49,1145.5648,429.440667,1949.97,375.615141,3475.891,3197.8545,352.195669,3324.8076,6053.9348,318.232892,80.061262,89.882488,1286.6509,2.857198,0.532687,22083.491604,2.64968455293695
-c50,1108.984,429.440667,3792.343,375.615141,3475.891,3197.8545,272.76268,171.815177,3203.344,6559.316,1337.2918,69.928921,69.928921,2.857198,10.00372,24077.376725,2.43026004320552
-c51,1145.5648,429.440667,174.989917,3356.913,1731.6018,3053.2545,272.76268,1683.0934,318.232892,357.330128,1299.926,89.882488,80.061262,2.570366,1.812953,13997.436853,4.18035723536222
-c12,5229.058553
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,188.762345,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0.102999,1223.640147,0
-c2,0,188.762345,90.173220,157.595858,76.708720,0,151.959867,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,1163.009848,0
-c3,0,188.762345,0,0,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,915.24077,0
-c4,0,0,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,0,1.156348,0.102999,935.792851,0
-c5,0,188.762345,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,1163.009848,0
-c6,0,0,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,1087.752718,0
-c7,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,1.156348,0,1161.738813,0
-c8,0,188.762345,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0.102999,1161.8535,0
-c9,0,188.762345,0,157.595858,0,0,151.959867,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,996.127908,0
-c10,0,0,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,0,0,0.102999,1048.141718,0
-c11,0,0,90.173220,157.595858,0,151.959867,0,0,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0,822.505055,0
-c12,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,1276.515063,0
-c13,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0,1201.481335,0
-c14,0,0,0,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,878.438292,0
-c15,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,0,1.156348,0.102999,1238.060411,0
-c16,0,188.762345,90.173220,157.595858,76.708720,0,151.959867,0,0,153.127903,38.454652,0,38.454652,0,0.102999,895.340216,0
-c17,0,188.762345,90.173220,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,0,1.156348,0.102999,972.595329,0
-c18,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,1201.584334,0
-c19,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,1238.060411,0
-c20,0,188.762345,90.173220,157.595858,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,933.173225,0
-c21,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,0,1.156348,0.102999,1238.060411,0
-c22,0,188.762345,0,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,990.291333,0
-c23,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,0,38.454652,0,0,1.156348,0.102999,1084.932508,0
-c24,0,188.762345,90.173220,0,76.708720,0,151.959867,74.930729,0,153.127903,0,38.454652,0,0,0.102999,774.220435,0
-c25,0,188.762345,90.173220,157.595858,76.708720,151.959867,0,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,1049.624467,0
-c26,0,188.762345,90.173220,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,972.595329,0
-c27,0,188.762345,90.173220,157.595858,76.708720,0,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,897.6646,0
-c28,0,188.762345,90.173220,157.595858,76.708720,0,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,897.6646,0
-c29,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,0,1.156348,0,1237.957412,0
-c30,0,188.762345,90.173220,157.595858,76.708720,0,0,74.930729,0,153.127903,38.454652,0,38.454652,1.156348,0.102999,819.467426,0
-c31,0,188.762345,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,0,0.102999,1123.398848,0
-c32,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,0,38.454652,0,0,1.156348,0.102999,1084.932508,0
-c33,0,188.762345,90.173220,157.595858,0,151.959867,0,74.930729,0,153.127903,0,0,38.454652,1.156348,0.102999,856.263921,0
-c34,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,0,38.454652,0,0,1.156348,0.102999,1084.932508,0
-c35,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,1123.38716,0
-c36,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,1.156348,0.102999,1163.129682,0
-c37,0,0,90.173220,0,0,0,151.959867,74.930729,0,153.127903,0,38.454652,38.454652,0,0.102999,547.204022,0
-c38,0,188.762345,90.173220,157.595858,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0.102999,1161.973334,0
-c39,0,188.762345,0,157.595858,76.708720,0,151.959867,0,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,997.905899,0
-c40,0,188.762345,90.173220,157.595858,0,0,0,74.930729,153.127903,0,38.454652,38.454652,38.454652,1.156348,0.102999,781.213358,0
-c41,0,188.762345,0,157.595858,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,920.876761,0
-c42,0,188.762345,0,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,0,38.454652,38.454652,1.156348,0.102999,1033.21394,0
-c43,0,188.762345,0,157.595858,76.708720,0,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0,1034.278977,0
-c44,0,188.762345,90.173220,0,0,151.959867,151.959867,74.930729,153.127903,0,0,38.454652,0,1.156348,0,850.524931,0
-c45,0,188.762345,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,1034.381976,0
-c46,0,188.762345,0,157.595858,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,0,38.454652,0,1.156348,0,1147.784192,0
-c47,0,188.762345,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,0,38.454652,38.454652,0,1.156348,0,881.151074,0
-c48,0,188.762345,0,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,1033.122629,0
-c49,0,188.762345,0,157.595858,0,0,151.959867,0,0,153.127903,38.454652,38.454652,0,1.156348,0.102999,729.614624,0
-c50,0,188.762345,0,157.595858,0,0,151.959867,74.930729,0,0,0,38.454652,38.454652,1.156348,0,651.314451,0
-c51,0,188.762345,90.173220,0,0,0,151.959867,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0,815.21689,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.549158,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0.418899,429.421685,0
-c2,0,109.549158,41.660867,60.905800,23.915213,0,43.122319,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,428.821856,0
-c3,0,109.549158,0,0,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,326.255189,0
-c4,0,0,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,0,0.861623,0.418899,307.311769,0
-c5,0,109.549158,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,428.821856,0
-c6,0,0,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,350.434088,0
-c7,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0.861623,0,424.436534,0
-c8,0,109.549158,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0.418899,427.960233,0
-c9,0,109.549158,0,60.905800,0,0,43.122319,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,363.245776,0
-c10,0,0,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,0,0,0.418899,337.611536,0
-c11,0,0,41.660867,60.905800,0,43.122319,0,0,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0,276.61088,0
-c12,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,459.983246,0
-c13,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0,441.236641,0
-c14,0,0,0,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,259.82835,0
-c15,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,0,0.861623,0.418899,448.022317,0
-c16,0,109.549158,41.660867,60.905800,23.915213,0,43.122319,0,0,47.088742,11.960929,0,11.960929,0,0.418899,350.582856,0
-c17,0,109.549158,41.660867,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,0,0.861623,0.418899,373.738608,0
-c18,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,441.65554,0
-c19,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,448.022317,0
-c20,0,109.549158,41.660867,60.905800,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,357.817901,0
-c21,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,0,0.861623,0.418899,448.022317,0
-c22,0,109.549158,0,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,345.45565,0
-c23,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,0,11.960929,0,0,0.861623,0.418899,400.933575,0
-c24,0,109.549158,41.660867,0,23.915213,0,43.122319,18.327706,0,47.088742,0,11.960929,0,0,0.418899,296.043833,0
-c25,0,109.549158,41.660867,60.905800,23.915213,43.122319,0,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,398.533221,0
-c26,0,109.549158,41.660867,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,373.738608,0
-c27,0,109.549158,41.660867,60.905800,23.915213,0,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,355.410902,0
-c28,0,109.549158,41.660867,60.905800,23.915213,0,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,355.410902,0
-c29,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,0,0.861623,0,447.603418,0
-c30,0,109.549158,41.660867,60.905800,23.915213,0,0,18.327706,0,47.088742,11.960929,0,11.960929,0.861623,0.418899,326.649866,0
-c31,0,109.549158,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0,0.418899,415.999304,0
-c32,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,0,11.960929,0,0,0.861623,0.418899,400.933575,0
-c33,0,109.549158,41.660867,60.905800,0,43.122319,0,18.327706,0,47.088742,0,0,11.960929,0.861623,0.418899,333.896043,0
-c34,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,0,11.960929,0,0,0.861623,0.418899,400.933575,0
-c35,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,412.894504,0
-c36,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0.861623,0.418899,429.694611,0
-c37,0,0,41.660867,0,0,0,43.122319,18.327706,0,47.088742,0,11.960929,11.960929,0,0.418899,174.540391,0
-c38,0,109.549158,41.660867,60.905800,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0.418899,428.832988,0
-c39,0,109.549158,0,60.905800,23.915213,0,43.122319,0,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,368.833283,0
-c40,0,109.549158,41.660867,60.905800,0,0,0,18.327706,47.088742,0,11.960929,11.960929,11.960929,0.861623,0.418899,314.695582,0
-c41,0,109.549158,0,60.905800,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,344.03867,0
-c42,0,109.549158,0,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,0,11.960929,11.960929,0.861623,0.418899,371.233637,0
-c43,0,109.549158,0,60.905800,23.915213,0,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0,374.781161,0
-c44,0,109.549158,41.660867,0,0,43.122319,43.122319,18.327706,47.088742,0,0,11.960929,0,0.861623,0,315.693663,0
-c45,0,109.549158,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,375.20006,0
-c46,0,109.549158,0,60.905800,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,0,11.960929,0,0.861623,0,405.942551,0
-c47,0,109.549158,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,0,11.960929,11.960929,0,0.861623,0,327.692419,0
-c48,0,109.549158,0,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,373.919538,0
-c49,0,109.549158,0,60.905800,0,0,43.122319,0,0,47.088742,11.960929,11.960929,0,0.861623,0.418899,285.868399,0
-c50,0,109.549158,0,60.905800,0,0,43.122319,18.327706,0,0,0,11.960929,11.960929,0.861623,0,256.688464,0
-c51,0,109.549158,41.660867,0,0,0,43.122319,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0,313.293309,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,32.936441,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0.089271,161.876952,0
-c2,0,32.936441,13.739047,21.882908,9.597736,0,18.163317,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,157.734587,0
-c3,0,32.936441,0,0,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,122.112632,0
-c4,0,0,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,0,0.281905,0.089271,119.949186,0
-c5,0,32.936441,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,157.734587,0
-c6,0,0,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,138.112503,0
-c7,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0.281905,0,156.775078,0
-c8,0,32.936441,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0.089271,157.452682,0
-c9,0,32.936441,0,21.882908,0,0,18.163317,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,134.397804,0
-c10,0,0,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,0,0,0.089271,132.981638,0
-c11,0,0,13.739047,21.882908,0,18.163317,0,0,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0,106.681167,0
-c12,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,171.048944,0
-c13,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0,162.529701,0
-c14,0,0,0,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,107.339508,0
-c15,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,0,0.281905,0.089271,166.199984,0
-c16,0,32.936441,13.739047,21.882908,9.597736,0,18.163317,0,0,19.033555,4.848960,0,4.848960,0,0.089271,125.140195,0
-c17,0,32.936441,13.739047,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,0,0.281905,0.089271,134.72231,0
-c18,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,162.618972,0
-c19,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,166.199984,0
-c20,0,32.936441,13.739047,21.882908,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,129.103296,0
-c21,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,0,0.281905,0.089271,166.199984,0
-c22,0,32.936441,0,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,130.578029,0
-c23,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,0,4.848960,0,0,0.281905,0.089271,147.166429,0
-c24,0,32.936441,13.739047,0,9.597736,0,18.163317,8.429972,0,19.033555,0,4.848960,0,0,0.089271,106.838299,0
-c25,0,32.936441,13.739047,21.882908,9.597736,18.163317,0,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,144.455655,0
-c26,0,32.936441,13.739047,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,134.72231,0
-c27,0,32.936441,13.739047,21.882908,9.597736,0,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,126.292338,0
-c28,0,32.936441,13.739047,21.882908,9.597736,0,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,126.292338,0
-c29,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,0,0.281905,0,166.110713,0
-c30,0,32.936441,13.739047,21.882908,9.597736,0,0,8.429972,0,19.033555,4.848960,0,4.848960,0.281905,0.089271,115.688755,0
-c31,0,32.936441,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0,0.089271,152.603722,0
-c32,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,0,4.848960,0,0,0.281905,0.089271,147.166429,0
-c33,0,32.936441,13.739047,21.882908,0,18.163317,0,8.429972,0,19.033555,0,0,4.848960,0.281905,0.089271,119.405376,0
-c34,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,0,4.848960,0,0,0.281905,0.089271,147.166429,0
-c35,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,152.015389,0
-c36,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0.281905,0.089271,157.770012,0
-c37,0,0,13.739047,0,0,0,18.163317,8.429972,0,19.033555,0,4.848960,4.848960,0,0.089271,69.153082,0
-c38,0,32.936441,13.739047,21.882908,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0.089271,157.488107,0
-c39,0,32.936441,0,21.882908,9.597736,0,18.163317,0,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,135.565568,0
-c40,0,32.936441,13.739047,21.882908,0,0,0,8.429972,19.033555,0,4.848960,4.848960,4.848960,0.281905,0.089271,110.939979,0
-c41,0,32.936441,0,21.882908,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,125.832223,0
-c42,0,32.936441,0,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,0,4.848960,4.848960,0.281905,0.089271,138.276342,0
-c43,0,32.936441,0,21.882908,9.597736,0,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0,139.057309,0
-c44,0,32.936441,13.739047,0,0,18.163317,18.163317,8.429972,19.033555,0,0,4.848960,0,0.281905,0,115.596514,0
-c45,0,32.936441,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,139.14658,0
-c46,0,32.936441,0,21.882908,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,0,4.848960,0,0.281905,0,152.371666,0
-c47,0,32.936441,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,0,4.848960,4.848960,0,0.281905,0,120.023754,0
-c48,0,32.936441,0,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,138.775404,0
-c49,0,32.936441,0,21.882908,0,0,18.163317,0,0,19.033555,4.848960,4.848960,0,0.281905,0.089271,102.085317,0
-c50,0,32.936441,0,21.882908,0,0,18.163317,8.429972,0,0,0,4.848960,4.848960,0.281905,0,91.392463,0
-c51,0,32.936441,13.739047,0,0,0,18.163317,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0,112.88574,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c2,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c3,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c4,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c9,16.0868,0,0,0,0,144.6,0,0,0,0,0,0,0,0,0,160.6868,0
-c10,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c11,16.0868,436.945,0,0,0,0,0,0,0,0,0,0,0,0,0,453.0318,0
-c12,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c13,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c14,16.0868,0,203.92,0,0,0,0,0,0,0,0,0,0,0,0,220.0068,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,33.1375,0,0,33.1375,0
-c16,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c17,16.0868,0,0,0,0,0,152.501,0,0,0,0,0,0,0,0,168.5878,0
-c18,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,33.1375,0,0,33.1375,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c26,16.0868,0,0,0,0,0,124.457,0,0,0,0,0,0,0,0,140.5438,0
-c27,16.0868,0,0,0,0,0,0,59.0058,0,0,0,0,0,0,0,75.0926,0
-c28,16.0868,0,0,0,0,0,0,59.0058,0,0,0,0,0,0,0,75.0926,0
-c29,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c30,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c31,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c34,16.0868,0,0,0,0,0,0,0,0,0,0,0,33.1375,0,0,49.2243,0
-c35,16.0868,0,0,0,0,0,0,0,0,0,12.8278,0,0,0,0,28.9146,0
-c36,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c37,0,397.898,0,0,110.25,144.6,0,0,0,0,0,0,0,0,0,652.748,0
-c38,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,16.0868,0,0,0,0,122.714,152.501,0,0,0,0,0,0,0,0,291.3018,0
-c41,16.0868,0,0,0,0,0,124.457,0,0,0,0,0,0,0,0,140.5438,0
-c42,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c43,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c44,16.0868,0,0,0,0,0,0,0,0,0,37.3658,0,0,0,0,53.4526,0
-c45,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c46,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c49,16.0868,0,0,0,0,144.6,0,0,0,0,0,0,0,0,0,160.6868,0
-c50,0,0,0,0,0,144.6,0,0,0,65.7062,37.3658,0,0,0,0,247.672,0
-c51,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c2,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c3,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c4,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c9,7.62006,0,0,0,0,28.103,0,0,0,0,0,0,0,0,0,35.72306,0
-c10,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c11,7.62006,83.3307,0,0,0,0,0,0,0,0,0,0,0,0,0,90.95076,0
-c12,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c13,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c14,7.62006,0,41.6322,0,0,0,0,0,0,0,0,0,0,0,0,49.25226,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,6.20782,0,0,6.20782,0
-c16,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c17,7.62006,0,0,0,0,0,28.2053,0,0,0,0,0,0,0,0,35.82536,0
-c18,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,6.20782,0,0,6.20782,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c26,7.62006,0,0,0,0,0,20.8436,0,0,0,0,0,0,0,0,28.46366,0
-c27,7.62006,0,0,0,0,0,0,10.5597,0,0,0,0,0,0,0,18.17976,0
-c28,7.62006,0,0,0,0,0,0,10.5597,0,0,0,0,0,0,0,18.17976,0
-c29,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c30,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c31,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c34,7.62006,0,0,0,0,0,0,0,0,0,0,0,6.20782,0,0,13.82788,0
-c35,7.62006,0,0,0,0,0,0,0,0,0,2.77964,0,0,0,0,10.3997,0
-c36,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c37,0,109.635,0,0,20.8418,28.103,0,0,0,0,0,0,0,0,0,158.5798,0
-c38,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,7.62006,0,0,0,0,20.8654,28.2053,0,0,0,0,0,0,0,0,56.69076,0
-c41,7.62006,0,0,0,0,0,20.8436,0,0,0,0,0,0,0,0,28.46366,0
-c42,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c43,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c44,7.62006,0,0,0,0,0,0,0,0,0,6.79928,0,0,0,0,14.41934,0
-c45,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c46,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c49,7.62006,0,0,0,0,28.103,0,0,0,0,0,0,0,0,0,35.72306,0
-c50,0,0,0,0,0,28.103,0,0,0,10.5438,6.79928,0,0,0,0,45.44608,0
-c51,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,450.60246,123.468921,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,12.419968,0.107291,1702.49886,5.5826302284496
-c2,450.60246,123.468921,59.178279,104.457756,51.115816,925.6073,100.738165,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2148.775935,4.42317952322099
-c3,450.60246,123.468921,426.5549,652.9992,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2729.3886,3.48225306201348
-c4,450.60246,1610.85,59.178279,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,26.069312,351.94343,1.204529,0.107291,4007.696132,2.37154255407649
-c5,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2096.304575,4.53389350858553
-c6,450.60246,1610.85,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,26.069312,1.204529,0.107291,3129.330357,3.03720629335887
-c7,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,857.91333,102.069715,26.069312,26.069312,26.069312,1.204529,1.669613,1983.176377,4.79252465336699
-c8,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,12.419968,0.107291,1824.685974,5.20879853988517
-c9,450.60246,123.468921,426.5549,104.457756,558.563,572.2717,100.738165,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2670.26414,3.55935641710125
-c10,450.60246,1610.85,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,351.94343,12.419968,0.107291,3466.419914,2.74185532036338
-c11,450.60246,1633.4297,59.178279,104.457756,333.7788,100.738165,971.2723,490.07014,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,1.04789,4428.127385,2.14637500618391
-c12,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,1506.209648,6.31015844282001
-c13,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,1.04789,1883.737085,5.04551390378802
-c14,450.60246,1610.85,743.3577,1159.5933,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,4550.602844,2.08860721863938
-c15,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,344.22248,216.53911,1.204529,0.107291,1734.393406,5.47996871824081
-c16,450.60246,123.468921,59.178279,104.457756,51.115816,544.1687,100.738165,290.68457,527.54432,102.069715,26.069312,344.22248,26.069312,12.419968,0.107291,2762.917065,3.43999533478594
-c17,450.60246,123.468921,59.178279,104.457756,51.115816,925.6073,618.5072,49.948052,102.069715,102.069715,26.069312,26.069312,351.94343,1.204529,0.107291,2992.419088,3.1761666935281
-c18,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,490.07014,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,2082.182056,4.56464489939659
-c19,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,351.94343,1.204529,0.107291,1869.687036,5.08342918716001
-c20,352.4661,123.468921,59.178279,104.457756,558.563,925.6073,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2983.561364,3.18559623211436
-c21,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,344.22248,351.94343,1.204529,0.107291,1967.934086,4.82964431718043
-c22,352.4661,123.468921,426.5549,652.9992,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,351.94343,1.204529,0.107291,2785.605101,3.4119774599028
-c23,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,543.5573,26.069312,344.22248,216.53911,1.204529,0.107291,2175.880991,4.36807976194688
-c24,352.4661,123.468921,59.178279,652.9992,51.115816,544.1687,100.738165,49.948052,857.91333,102.069715,344.11179,26.069312,210.33129,7.640322,0.107291,3482.326283,2.72933123224711
-c25,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,971.2723,490.07014,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,2952.716191,3.21887415562744
-c26,450.60246,123.468921,59.178279,104.457756,51.115816,544.1687,992.1159,49.948052,102.069715,102.069715,344.11179,26.069312,26.069312,1.204529,0.107291,2976.757548,3.19287737931429
-c27,450.60246,123.468921,59.178279,104.457756,51.115816,544.1687,590.3019,500.62984,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,2889.885706,3.28885734455903
-c28,450.60246,123.468921,59.178279,104.457756,51.115816,544.1687,590.3019,500.62984,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,2889.885706,3.28885734455903
-c29,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,208.37216,26.069312,351.94343,1.204529,1.04789,1833.024365,5.1851038212957
-c30,450.60246,123.468921,59.178279,104.457756,51.115816,925.6073,971.2723,49.948052,857.91333,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,4093.306853,2.32194220153321
-c31,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,0.107291,2383.179576,3.98812655785673
-c32,352.4661,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,543.5573,26.069312,206.54856,210.33129,1.204529,0.107291,2031.999251,4.67737460315754
-c33,450.60246,123.468921,59.178279,104.457756,333.7788,100.738165,590.3019,49.948052,857.91333,102.069715,344.11179,344.22248,26.069312,1.204529,0.107291,3488.17278,2.72475662330131
-c34,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,543.5573,26.069312,344.22248,216.53911,1.204529,0.107291,2274.017351,4.179573095985
-c35,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,543.5573,346.89143,26.069312,26.069312,1.204529,0.107291,2086.216503,4.5558175236131
-c36,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,1.204529,0.107291,1927.425414,4.93114887655264
-c37,352.4661,1720.485,59.178279,652.9992,579.4048,572.2717,100.738165,49.948052,857.91333,102.069715,208.37216,26.069312,26.069312,7.640322,0.107291,5315.732738,1.78797965354029
-c38,450.60246,123.468921,59.178279,104.457756,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,0.107291,1933.861207,4.91473826152725
-c39,352.4661,123.468921,701.7255,104.457756,51.115816,925.6073,100.738165,290.68457,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,2933.923314,3.23949224872303
-c40,450.60246,123.468921,59.178279,104.457756,333.7788,946.4727,618.5072,49.948052,102.069715,543.5573,26.069312,26.069312,26.069312,1.204529,0.107291,3411.560939,2.78594521667592
-c41,450.60246,123.468921,701.7255,104.457756,51.115816,544.1687,992.1159,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,3301.262291,2.8790265759884
-c42,450.60246,123.468921,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,857.91333,102.069715,344.11179,26.069312,26.069312,1.204529,0.107291,2765.169514,3.43719318694929
-c43,450.60246,123.468921,701.7255,104.457756,51.115816,544.1687,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,1.204529,1.669613,2591.926126,3.66693390523994
-c44,450.60246,123.468921,59.178279,1159.5933,558.563,100.738165,100.738165,49.948052,102.069715,883.36663,215.17144,26.069312,210.33129,1.204529,1.669613,4042.712871,2.35100098032658
-c45,450.60246,123.468921,426.5549,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,1.204529,0.107291,2361.326404,4.02503514101072
-c46,450.60246,123.468921,426.5549,104.457756,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,344.11179,26.069312,351.94343,1.204529,1.669613,2336.762339,4.06734634183325
-c47,352.4661,123.468921,426.5549,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,883.36663,26.069312,26.069312,210.33129,1.204529,1.04789,3430.180688,2.77082251502599
-c48,450.60246,123.468921,701.7255,104.457756,51.115816,100.738165,971.2723,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,3025.465519,3.14147419105079
-c49,450.60246,123.468921,426.5549,104.457756,558.563,572.2717,100.738165,490.07014,857.91333,102.069715,26.069312,26.069312,210.33129,1.204529,0.107291,4050.491821,2.34648589439811
-c50,352.4661,123.468921,701.7255,104.457756,558.563,572.2717,100.738165,49.948052,527.54432,893.91043,215.17144,26.069312,26.069312,1.204529,1.669613,4255.27815,2.23356067444003
-c51,450.60246,123.468921,59.178279,652.9992,333.7788,544.1687,100.738165,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,1.04789,3022.521728,3.14453383593562
-c12,1506.209648
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results1.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results1.csv
deleted file mode 100644
index fa38a02ff27be3de0da8d5ce5598fcea3f304bb2..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results1.csv
+++ /dev/null
@@ -1,209 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,174.570537,87.285268,6973.454,3475.891,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,16815.616687,3.47975858299987
-c2,1108.984,174.570537,87.285268,3356.913,1731.6018,6007.756,157.113483,78.556742,3203.344,157.113483,39.466907,39.466907,1286.6509,69.37799,1.812953,17500.01397,3.34367084557435
-c3,1108.984,174.570537,87.285268,157.113483,3475.891,3053.2545,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,12895.417187,4.53760320745796
-c4,1108.984,174.570537,87.285268,157.113483,1731.6018,3053.2545,157.113483,78.556742,6053.9348,157.113483,39.466907,39.466907,2353.8847,69.37799,1.812953,15263.537553,3.83359927254474
-c5,1108.984,174.570537,87.285268,6973.454,1731.6018,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,15071.327487,3.88249054406278
-c6,1108.984,174.570537,87.285268,157.113483,3475.891,3053.2545,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,12895.417187,4.53760320745796
-c7,1108.984,174.570537,87.285268,3356.913,1731.6018,6007.756,157.113483,78.556742,3203.344,157.113483,39.466907,39.466907,1286.6509,69.37799,1.812953,17500.01397,3.34367084557435
-c8,1108.984,174.570537,87.285268,6973.454,3475.891,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,16815.616687,3.47975858299987
-c9,1108.984,174.570537,87.285268,157.113483,1731.6018,3053.2545,157.113483,78.556742,6053.9348,157.113483,39.466907,39.466907,2353.8847,69.37799,1.812953,15263.537553,3.83359927254474
-c10,1108.984,174.570537,87.285268,6973.454,1731.6018,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,15071.327487,3.88249054406278
-c11,1108.984,174.570537,87.285268,6973.454,3475.891,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,16815.616687,3.47975858299987
-c12,1108.984,174.570537,87.285268,157.113483,3475.891,3053.2545,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,12895.417187,4.53760320745796
-c13,1108.984,174.570537,87.285268,6973.454,1731.6018,157.113483,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,69.37799,1.812953,15071.327487,3.88249054406278
-c14,1108.984,174.570537,87.285268,3356.913,1731.6018,6007.756,157.113483,78.556742,3203.344,157.113483,39.466907,39.466907,1286.6509,69.37799,1.812953,17500.01397,3.34367084557435
-c3,12895.417187
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,90.532480,45.439232,1159.5933,558.563,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3278.109628,2.89936059089722
-c2,352.4661,90.532480,45.439232,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,12.419968,1.04789,3401.736372,2.79399131479812
-c3,352.4661,90.532480,45.439232,82.574848,558.563,544.1687,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,2662.685028,3.5694878294299
-c4,352.4661,90.532480,45.439232,82.574848,333.7788,544.1687,82.574848,41.518080,857.91333,83.036160,21.220352,21.220352,351.94343,12.419968,1.04789,2921.85457,3.25287299727334
-c5,352.4661,90.532480,45.439232,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3053.325428,3.11280997418759
-c6,352.4661,90.532480,45.439232,82.574848,558.563,544.1687,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,2662.685028,3.5694878294299
-c7,352.4661,90.532480,45.439232,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,12.419968,1.04789,3401.736372,2.79399131479812
-c8,352.4661,90.532480,45.439232,1159.5933,558.563,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3278.109628,2.89936059089722
-c9,352.4661,90.532480,45.439232,82.574848,333.7788,544.1687,82.574848,41.518080,857.91333,83.036160,21.220352,21.220352,351.94343,12.419968,1.04789,2921.85457,3.25287299727334
-c10,352.4661,90.532480,45.439232,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3053.325428,3.11280997418759
-c11,352.4661,90.532480,45.439232,1159.5933,558.563,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3278.109628,2.89936059089722
-c12,352.4661,90.532480,45.439232,82.574848,558.563,544.1687,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,2662.685028,3.5694878294299
-c13,352.4661,90.532480,45.439232,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,12.419968,1.04789,3053.325428,3.11280997418759
-c14,352.4661,90.532480,45.439232,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,12.419968,1.04789,3401.736372,2.79399131479812
-c3,2662.685028
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,472.88204,219.119355,6973.454,3475.891,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,18086.77552,3.23519725529718
-c2,1108.984,472.88204,219.119355,3356.913,1731.6018,6130.47,352.195669,171.815177,3203.344,357.330128,89.882488,89.882488,1286.6509,70.74902,1.812953,18643.633018,3.13856674139901
-c3,1108.984,472.88204,219.119355,375.615141,3475.891,3197.8545,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,14334.595492,4.08203262292633
-c4,1108.984,472.88204,219.119355,375.615141,1731.6018,3053.2545,352.195669,171.815177,6053.9348,357.330128,89.882488,89.882488,2353.8847,69.37799,1.812953,16501.573229,3.54598229371053
-c5,1108.984,472.88204,219.119355,6973.454,1802.5061,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,16413.39062,3.56503344380265
-c6,1108.984,472.88204,219.119355,375.615141,3475.891,3197.8545,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,14334.595492,4.08203262292633
-c7,1108.984,472.88204,219.119355,3356.913,1731.6018,6130.47,352.195669,171.815177,3203.344,357.330128,89.882488,89.882488,1286.6509,70.74902,1.812953,18643.633018,3.13856674139901
-c8,1108.984,472.88204,219.119355,6973.454,3475.891,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,18086.77552,3.23519725529718
-c9,1108.984,472.88204,219.119355,375.615141,1731.6018,3053.2545,352.195669,171.815177,6053.9348,357.330128,89.882488,89.882488,2353.8847,69.37799,1.812953,16501.573229,3.54598229371053
-c10,1108.984,472.88204,219.119355,6973.454,1802.5061,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,16413.39062,3.56503344380265
-c11,1108.984,472.88204,219.119355,6973.454,3475.891,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,18086.77552,3.23519725529718
-c12,1108.984,472.88204,219.119355,375.615141,3475.891,3197.8545,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,14334.595492,4.08203262292633
-c13,1108.984,472.88204,219.119355,6973.454,1802.5061,352.195669,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,69.37799,1.812953,16413.39062,3.56503344380265
-c14,1108.984,472.88204,219.119355,3356.913,1731.6018,6130.47,352.195669,171.815177,3203.344,357.330128,89.882488,89.882488,1286.6509,70.74902,1.812953,18643.633018,3.13856674139901
-c3,14334.595492
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c2,0,188.762345,90.173220,0,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,735.863368,0
-c3,0,188.762345,90.173220,157.595858,0,0,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,933.201748,0
-c4,0,188.762345,90.173220,157.595858,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,893.459226,0
-c5,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c6,0,188.762345,90.173220,157.595858,0,0,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,933.201748,0
-c7,0,188.762345,90.173220,0,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,735.863368,0
-c8,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c9,0,188.762345,90.173220,157.595858,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,893.459226,0
-c10,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c11,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c12,0,188.762345,90.173220,157.595858,0,0,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,933.201748,0
-c13,0,188.762345,90.173220,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,927.565757,0
-c14,0,188.762345,90.173220,0,0,0,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,735.863368,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c2,0,109.549158,41.660867,0,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,283.67065,0
-c3,0,109.549158,41.660867,60.905800,0,0,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,361.376557,0
-c4,0,109.549158,41.660867,60.905800,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,344.57645,0
-c5,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c6,0,109.549158,41.660867,60.905800,0,0,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,361.376557,0
-c7,0,109.549158,41.660867,0,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,283.67065,0
-c8,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c9,0,109.549158,41.660867,60.905800,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,344.57645,0
-c10,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c11,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c12,0,109.549158,41.660867,60.905800,0,0,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,361.376557,0
-c13,0,109.549158,41.660867,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,343.593076,0
-c14,0,109.549158,41.660867,0,0,0,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,283.67065,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c2,0,32.936441,13.739047,0,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,102.000252,0
-c3,0,32.936441,13.739047,21.882908,0,0,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,129.637783,0
-c4,0,32.936441,13.739047,21.882908,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,123.88316,0
-c5,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c6,0,32.936441,13.739047,21.882908,0,0,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,129.637783,0
-c7,0,32.936441,13.739047,0,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,102.000252,0
-c8,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c9,0,32.936441,13.739047,21.882908,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,123.88316,0
-c10,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c11,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c12,0,32.936441,13.739047,21.882908,0,0,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,129.637783,0
-c13,0,32.936441,13.739047,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,125.918192,0
-c14,0,32.936441,13.739047,0,0,0,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,102.000252,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,122.714,0,0,0,0,0,0,0,1.37103,0,124.08503,0
-c3,0,0,0,0,0,144.6,0,0,0,0,0,0,0,0,0,144.6,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,70.9043,0,0,0,0,0,0,0,0,0,0,70.9043,0
-c6,0,0,0,0,0,144.6,0,0,0,0,0,0,0,0,0,144.6,0
-c7,0,0,0,0,0,122.714,0,0,0,0,0,0,0,1.37103,0,124.08503,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,70.9043,0,0,0,0,0,0,0,0,0,0,70.9043,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,144.6,0,0,0,0,0,0,0,0,0,144.6,0
-c13,0,0,0,0,70.9043,0,0,0,0,0,0,0,0,0,0,70.9043,0
-c14,0,0,0,0,0,122.714,0,0,0,0,0,0,0,1.37103,0,124.08503,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,20.8654,0,0,0,0,0,0,0,0.810781,0,21.676181,0
-c3,0,0,0,0,0,28.103,0,0,0,0,0,0,0,0,0,28.103,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,14.8008,0,0,0,0,0,0,0,0,0,0,14.8008,0
-c6,0,0,0,0,0,28.103,0,0,0,0,0,0,0,0,0,28.103,0
-c7,0,0,0,0,0,20.8654,0,0,0,0,0,0,0,0.810781,0,21.676181,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,14.8008,0,0,0,0,0,0,0,0,0,0,14.8008,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,28.103,0,0,0,0,0,0,0,0,0,28.103,0
-c13,0,0,0,0,14.8008,0,0,0,0,0,0,0,0,0,0,14.8008,0
-c14,0,0,0,0,0,20.8654,0,0,0,0,0,0,0,0.810781,0,21.676181,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,123.468921,59.178279,1159.5933,558.563,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3404.02782,2.79211051770692
-c2,352.4661,123.468921,59.178279,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,13.230749,1.04789,3525.412805,2.69597417781053
-c3,352.4661,123.468921,59.178279,104.457756,558.563,572.2717,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,2820.425811,3.36985351075226
-c4,352.4661,123.468921,59.178279,104.457756,333.7788,544.1687,100.738165,49.948052,857.91333,102.069715,26.069312,26.069312,351.94343,12.419968,1.04789,3045.73773,3.12056476574676
-c5,352.4661,123.468921,59.178279,1159.5933,348.5796,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3194.04442,2.975669906442
-c6,352.4661,123.468921,59.178279,104.457756,558.563,572.2717,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,2820.425811,3.36985351075226
-c7,352.4661,123.468921,59.178279,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,13.230749,1.04789,3525.412805,2.69597417781053
-c8,352.4661,123.468921,59.178279,1159.5933,558.563,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3404.02782,2.79211051770692
-c9,352.4661,123.468921,59.178279,104.457756,333.7788,544.1687,100.738165,49.948052,857.91333,102.069715,26.069312,26.069312,351.94343,12.419968,1.04789,3045.73773,3.12056476574676
-c10,352.4661,123.468921,59.178279,1159.5933,348.5796,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3194.04442,2.975669906442
-c11,352.4661,123.468921,59.178279,1159.5933,558.563,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3404.02782,2.79211051770692
-c12,352.4661,123.468921,59.178279,104.457756,558.563,572.2717,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,2820.425811,3.36985351075226
-c13,352.4661,123.468921,59.178279,1159.5933,348.5796,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,12.419968,1.04789,3194.04442,2.975669906442
-c14,352.4661,123.468921,59.178279,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,13.230749,1.04789,3525.412805,2.69597417781053
-c3,2820.425811
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results2.csv b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results2.csv
deleted file mode 100644
index 03e64efef1e92da5898265ce6982aebb2ff9a7c7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_results2.csv
+++ /dev/null
@@ -1,385 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6358.145,1949.97,3356.913,78.556742,3053.2545,69.626370,78.556742,157.113483,69.626370,1299.926,1273.3007,39.466907,57.16078,10.00372,18960.604314,3.08609818365255
-c2,1108.984,86.311660,1949.97,3356.913,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,1286.6509,57.16078,10.00372,11596.187932,5.04599327654297
-c3,1108.984,174.570537,38.681317,3356.913,78.556742,3053.2545,69.626370,78.556742,3203.344,69.626370,17.490144,1273.3007,39.466907,57.16078,10.00372,12629.535829,4.63313039940282
-c4,1108.984,6358.145,38.681317,3356.913,78.556742,77.680494,69.626370,78.556742,157.113483,157.113483,17.490144,1273.3007,39.466907,57.16078,10.00372,12878.792882,4.54346047217176
-c5,1108.984,6358.145,87.285268,157.113483,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,39.466907,57.16078,10.00372,11558.35303,5.06251073875955
-c6,1108.984,6358.145,43.155830,157.113483,34.813185,118.016247,3366.9575,78.556742,69.626370,157.113483,17.490144,17.490144,39.466907,57.16078,10.00372,11634.093535,5.02955268186648
-c7,1108.984,6358.145,38.681317,3356.913,1731.6018,69.626370,69.626370,78.556742,157.113483,3427.0759,1299.926,19.513340,39.466907,57.16078,10.00372,17822.394729,3.2831887860428
-c8,1108.984,174.570537,1949.97,3356.913,78.556742,3053.2545,69.626370,78.556742,157.113483,69.626370,17.490144,1273.3007,29.645681,57.16078,10.00372,11484.772769,5.09494506425489
-c9,1108.984,6358.145,38.681317,3356.913,78.556742,69.626370,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,15845.198385,3.69287181213874
-c10,1108.984,174.570537,38.681317,3356.913,78.556742,118.016247,69.626370,78.556742,3203.344,69.626370,39.466907,17.490144,1286.6509,57.16078,10.00372,9707.647776,6.02764826149737
-c11,1108.984,6358.145,38.681317,3356.913,78.556742,69.626370,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,15845.198385,3.69287181213874
-c12,1108.984,6358.145,38.681317,3356.913,78.556742,77.680494,69.626370,78.556742,157.113483,157.113483,17.490144,1273.3007,39.466907,57.16078,10.00372,12878.792882,4.54346047217176
-c13,1108.984,174.570537,38.681317,3356.913,78.556742,3053.2545,69.626370,78.556742,3203.344,69.626370,17.490144,1273.3007,39.466907,57.16078,10.00372,12629.535829,4.63313039940282
-c14,1108.984,6358.145,38.681317,3356.913,1731.6018,69.626370,69.626370,78.556742,157.113483,3427.0759,1299.926,19.513340,39.466907,57.16078,10.00372,17822.394729,3.2831887860428
-c15,1108.984,86.311660,1949.97,3356.913,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,1286.6509,57.16078,10.00372,11596.187932,5.04599327654297
-c16,1108.984,6358.145,43.155830,157.113483,34.813185,118.016247,3366.9575,78.556742,69.626370,157.113483,17.490144,17.490144,39.466907,57.16078,10.00372,11634.093535,5.02955268186648
-c17,1108.984,174.570537,1949.97,3356.913,78.556742,3053.2545,69.626370,78.556742,157.113483,69.626370,17.490144,1273.3007,29.645681,57.16078,10.00372,11484.772769,5.09494506425489
-c18,1108.984,174.570537,38.681317,3356.913,78.556742,118.016247,69.626370,78.556742,3203.344,69.626370,39.466907,17.490144,1286.6509,57.16078,10.00372,9707.647776,6.02764826149737
-c19,1108.984,86.311660,43.155830,3356.913,34.813185,118.016247,69.626370,78.556742,3203.344,69.626370,17.490144,17.490144,39.466907,57.16078,10.00372,8310.959099,7.04061774843518
-c20,1108.984,6358.145,87.285268,157.113483,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,39.466907,57.16078,10.00372,11558.35303,5.06251073875955
-c21,1108.984,174.570537,38.681317,3356.913,78.556742,3053.2545,3366.9575,78.556742,157.113483,69.626370,17.490144,1273.3007,39.466907,57.16078,10.00372,12880.636442,4.54281018272676
-c22,1108.984,6358.145,38.681317,3356.913,1731.6018,69.626370,69.626370,78.556742,157.113483,3427.0759,1299.926,19.513340,39.466907,57.16078,10.00372,17822.394729,3.2831887860428
-c23,1108.984,6358.145,87.285268,157.113483,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,39.466907,57.16078,10.00372,11558.35303,5.06251073875955
-c24,1108.984,6358.145,43.155830,3356.913,1731.6018,3053.2545,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,20486.346086,2.8562578368898
-c25,1108.984,86.311660,1949.97,3356.913,78.556742,118.016247,157.113483,78.556742,3203.344,69.626370,17.490144,17.490144,1286.6509,57.16078,10.00372,11596.187932,5.04599327654297
-c26,1108.984,174.570537,1949.97,3356.913,78.556742,3053.2545,69.626370,78.556742,157.113483,69.626370,17.490144,1273.3007,29.645681,57.16078,10.00372,11484.772769,5.09494506425489
-c27,1108.984,6358.145,43.155830,157.113483,34.813185,118.016247,3366.9575,78.556742,69.626370,157.113483,17.490144,17.490144,39.466907,57.16078,10.00372,11634.093535,5.02955268186648
-c28,1108.984,6358.145,38.681317,3356.913,78.556742,69.626370,157.113483,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,15845.198385,3.69287181213874
-c29,1108.984,6358.145,38.681317,3356.913,78.556742,77.680494,69.626370,78.556742,157.113483,157.113483,17.490144,1273.3007,39.466907,57.16078,10.00372,12878.792882,4.54346047217176
-c30,1108.984,174.570537,38.681317,3356.913,78.556742,118.016247,69.626370,78.556742,3203.344,69.626370,39.466907,17.490144,1286.6509,57.16078,10.00372,9707.647776,6.02764826149737
-c19,8310.959099
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1610.85,426.5549,652.9992,41.518080,544.1687,82.574848,41.518080,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,4364.173235,2.17782874978315
-c2,352.4661,90.532480,426.5549,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2643.400945,3.59552788101436
-c3,352.4661,90.532480,45.439232,652.9992,41.518080,544.1687,82.574848,41.518080,527.54432,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2720.096399,3.49414888828177
-c4,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,3334.311907,2.850489737627
-c5,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3023.067507,3.1439661276487
-c6,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3086.286399,3.07956573736092
-c7,352.4661,1610.85,45.439232,652.9992,333.7788,82.574848,82.574848,41.518080,83.036160,543.5573,208.37216,21.220352,21.220352,7.640322,1.669613,4088.917367,2.32443482528233
-c8,352.4661,90.532480,426.5549,652.9992,41.518080,544.1687,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2656.703907,3.57752392925871
-c9,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,3770.630205,2.52064545956594
-c10,352.4661,90.532480,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2262.285277,4.20124810717017
-c11,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,3770.630205,2.52064545956594
-c12,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,3334.311907,2.850489737627
-c13,352.4661,90.532480,45.439232,652.9992,41.518080,544.1687,82.574848,41.518080,527.54432,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2720.096399,3.49414888828177
-c14,352.4661,1610.85,45.439232,652.9992,333.7788,82.574848,82.574848,41.518080,83.036160,543.5573,208.37216,21.220352,21.220352,7.640322,1.669613,4088.917367,2.32443482528233
-c15,352.4661,90.532480,426.5549,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2643.400945,3.59552788101436
-c16,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3086.286399,3.07956573736092
-c17,352.4661,90.532480,426.5549,652.9992,41.518080,544.1687,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2656.703907,3.57752392925871
-c18,352.4661,90.532480,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2262.285277,4.20124810717017
-c19,352.4661,90.532480,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,2073.174339,4.58447778402307
-c20,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3023.067507,3.1439661276487
-c21,352.4661,90.532480,45.439232,652.9992,41.518080,544.1687,590.3019,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2783.315291,3.41478446486268
-c22,352.4661,1610.85,45.439232,652.9992,333.7788,82.574848,82.574848,41.518080,83.036160,543.5573,208.37216,21.220352,21.220352,7.640322,1.669613,4088.917367,2.32443482528233
-c23,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3023.067507,3.1439661276487
-c24,352.4661,1610.85,45.439232,652.9992,333.7788,544.1687,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,4524.484777,2.1006639244868
-c25,352.4661,90.532480,426.5549,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2643.400945,3.59552788101436
-c26,352.4661,90.532480,426.5549,652.9992,41.518080,544.1687,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,2656.703907,3.57752392925871
-c27,352.4661,1610.85,45.439232,82.574848,41.518080,82.574848,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,3086.286399,3.07956573736092
-c28,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,3770.630205,2.52064545956594
-c29,352.4661,1610.85,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,83.036160,83.036160,21.220352,206.54856,21.220352,7.640322,1.669613,3334.311907,2.850489737627
-c30,352.4661,90.532480,45.439232,652.9992,41.518080,82.574848,82.574848,41.518080,527.54432,83.036160,21.220352,21.220352,210.33129,7.640322,1.669613,2262.285277,4.20124810717017
-c19,2073.174339
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6756.043,1949.97,3356.913,179.180675,3053.2545,264.708556,171.815177,357.330128,269.843015,1299.926,1273.3007,89.882488,57.16078,10.00372,20198.315739,2.89698840781653
-c2,1108.984,384.623163,1949.97,3356.913,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,1286.6509,57.16078,10.00372,12779.593982,4.57872812450413
-c3,1108.984,472.88204,170.515404,3356.913,179.180675,3053.2545,264.708556,171.815177,3203.344,269.843015,67.905725,1273.3007,89.882488,57.16078,10.00372,13749.69378,4.25567924302035
-c4,1108.984,6756.043,170.515404,3356.913,179.180675,272.76268,264.708556,171.815177,357.330128,357.330128,67.905725,1273.3007,89.882488,57.16078,10.00372,14493.836161,4.03718420639608
-c5,1108.984,6756.043,219.119355,375.615141,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,89.882488,57.16078,10.00372,13242.096903,4.41880820158194
-c6,1108.984,6756.043,174.989917,375.615141,135.437118,313.098433,3366.9575,171.815177,269.843015,357.330128,67.905725,67.905725,89.882488,57.16078,10.00372,13322.971867,4.39198453527753
-c7,1108.984,6756.043,170.515404,3356.913,1731.6018,264.708556,264.708556,171.815177,357.330128,3427.0759,1299.926,69.928921,89.882488,57.16078,10.00372,19136.59743,3.05771633391299
-c8,1108.984,472.88204,1949.97,3356.913,179.180675,3053.2545,264.708556,171.815177,357.330128,269.843015,67.905725,1273.3007,80.061262,57.16078,10.00372,12673.313278,4.61712616880261
-c9,1108.984,6756.043,170.515404,3356.913,179.180675,264.708556,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,17316.567648,3.37909265245465
-c10,1108.984,472.88204,170.515404,3356.913,179.180675,313.098433,264.708556,171.815177,3203.344,269.843015,89.882488,67.905725,1286.6509,57.16078,10.00372,11022.887913,5.30843520990056
-c11,1108.984,6756.043,170.515404,3356.913,179.180675,264.708556,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,17316.567648,3.37909265245465
-c12,1108.984,6756.043,170.515404,3356.913,179.180675,272.76268,264.708556,171.815177,357.330128,357.330128,67.905725,1273.3007,89.882488,57.16078,10.00372,14493.836161,4.03718420639608
-c13,1108.984,472.88204,170.515404,3356.913,179.180675,3053.2545,264.708556,171.815177,3203.344,269.843015,67.905725,1273.3007,89.882488,57.16078,10.00372,13749.69378,4.25567924302035
-c14,1108.984,6756.043,170.515404,3356.913,1731.6018,264.708556,264.708556,171.815177,357.330128,3427.0759,1299.926,69.928921,89.882488,57.16078,10.00372,19136.59743,3.05771633391299
-c15,1108.984,384.623163,1949.97,3356.913,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,1286.6509,57.16078,10.00372,12779.593982,4.57872812450413
-c16,1108.984,6756.043,174.989917,375.615141,135.437118,313.098433,3366.9575,171.815177,269.843015,357.330128,67.905725,67.905725,89.882488,57.16078,10.00372,13322.971867,4.39198453527753
-c17,1108.984,472.88204,1949.97,3356.913,179.180675,3053.2545,264.708556,171.815177,357.330128,269.843015,67.905725,1273.3007,80.061262,57.16078,10.00372,12673.313278,4.61712616880261
-c18,1108.984,472.88204,170.515404,3356.913,179.180675,313.098433,264.708556,171.815177,3203.344,269.843015,89.882488,67.905725,1286.6509,57.16078,10.00372,11022.887913,5.30843520990056
-c19,1108.984,384.623163,174.989917,3356.913,135.437118,313.098433,264.708556,171.815177,3203.344,269.843015,67.905725,67.905725,89.882488,57.16078,10.00372,9676.614817,6.04697896370779
-c20,1108.984,6756.043,219.119355,375.615141,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,89.882488,57.16078,10.00372,13242.096903,4.41880820158194
-c21,1108.984,472.88204,170.515404,3356.913,179.180675,3053.2545,3366.9575,171.815177,357.330128,269.843015,67.905725,1273.3007,89.882488,57.16078,10.00372,14005.928852,4.17782262379993
-c22,1108.984,6756.043,170.515404,3356.913,1731.6018,264.708556,264.708556,171.815177,357.330128,3427.0759,1299.926,69.928921,89.882488,57.16078,10.00372,19136.59743,3.05771633391299
-c23,1108.984,6756.043,219.119355,375.615141,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,89.882488,57.16078,10.00372,13242.096903,4.41880820158194
-c24,1108.984,6756.043,174.989917,3356.913,1731.6018,3053.2545,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,21662.00923,2.70124003510435
-c25,1108.984,384.623163,1949.97,3356.913,179.180675,313.098433,352.195669,171.815177,3203.344,269.843015,67.905725,67.905725,1286.6509,57.16078,10.00372,12779.593982,4.57872812450413
-c26,1108.984,472.88204,1949.97,3356.913,179.180675,3053.2545,264.708556,171.815177,357.330128,269.843015,67.905725,1273.3007,80.061262,57.16078,10.00372,12673.313278,4.61712616880261
-c27,1108.984,6756.043,174.989917,375.615141,135.437118,313.098433,3366.9575,171.815177,269.843015,357.330128,67.905725,67.905725,89.882488,57.16078,10.00372,13322.971867,4.39198453527753
-c28,1108.984,6756.043,170.515404,3356.913,179.180675,264.708556,352.195669,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,17316.567648,3.37909265245465
-c29,1108.984,6756.043,170.515404,3356.913,179.180675,272.76268,264.708556,171.815177,357.330128,357.330128,67.905725,1273.3007,89.882488,57.16078,10.00372,14493.836161,4.03718420639608
-c30,1108.984,472.88204,170.515404,3356.913,179.180675,313.098433,264.708556,171.815177,3203.344,269.843015,89.882488,67.905725,1286.6509,57.16078,10.00372,11022.887913,5.30843520990056
-c19,9676.614817
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,76.708720,0,151.959867,74.930729,153.127903,153.127903,0,0,38.454652,0,0,648.309774,0
-c2,0,188.762345,0,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,874.358735,0
-c3,0,188.762345,90.173220,0,76.708720,0,151.959867,74.930729,0,153.127903,38.454652,0,38.454652,0,0,812.572088,0
-c4,0,0,90.173220,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,928.897513,0
-c5,0,0,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,0,0,971.82012,0
-c6,0,0,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0,972.988156,0
-c7,0,0,90.173220,0,0,151.959867,151.959867,74.930729,153.127903,0,0,38.454652,38.454652,0,0,699.06089,0
-c8,0,188.762345,0,0,76.708720,0,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,875.526771,0
-c9,0,0,90.173220,0,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,815.512132,0
-c10,0,188.762345,90.173220,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,964.531955,0
-c11,0,0,90.173220,0,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,815.512132,0
-c12,0,0,90.173220,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,928.897513,0
-c13,0,188.762345,90.173220,0,76.708720,0,151.959867,74.930729,0,153.127903,38.454652,0,38.454652,0,0,812.572088,0
-c14,0,0,90.173220,0,0,151.959867,151.959867,74.930729,153.127903,0,0,38.454652,38.454652,0,0,699.06089,0
-c15,0,188.762345,0,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,874.358735,0
-c16,0,0,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0,972.988156,0
-c17,0,188.762345,0,0,76.708720,0,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,875.526771,0
-c18,0,188.762345,90.173220,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,964.531955,0
-c19,0,188.762345,90.173220,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,0,0,1002.986607,0
-c20,0,0,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,0,0,971.82012,0
-c21,0,188.762345,90.173220,0,76.708720,0,0,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,813.740124,0
-c22,0,0,90.173220,0,0,151.959867,151.959867,74.930729,153.127903,0,0,38.454652,38.454652,0,0,699.06089,0
-c23,0,0,90.173220,157.595858,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,38.454652,0,0,971.82012,0
-c24,0,0,90.173220,0,0,0,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,586.843545,0
-c25,0,188.762345,0,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,874.358735,0
-c26,0,188.762345,0,0,76.708720,0,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,875.526771,0
-c27,0,0,90.173220,157.595858,76.708720,151.959867,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0,972.988156,0
-c28,0,0,90.173220,0,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,0,0,815.512132,0
-c29,0,0,90.173220,0,76.708720,151.959867,151.959867,74.930729,153.127903,153.127903,38.454652,0,38.454652,0,0,928.897513,0
-c30,0,188.762345,90.173220,0,76.708720,151.959867,151.959867,74.930729,0,153.127903,38.454652,38.454652,0,0,0,964.531955,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,23.915213,0,43.122319,18.327706,47.088742,47.088742,0,0,11.960929,0,0,191.503651,0
-c2,0,109.549158,0,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,309.047315,0
-c3,0,109.549158,41.660867,0,23.915213,0,43.122319,18.327706,0,47.088742,11.960929,0,11.960929,0,0,307.585863,0
-c4,0,0,41.660867,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,288.247766,0
-c5,0,0,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0,0,314.025753,0
-c6,0,0,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0,317.992176,0
-c7,0,0,41.660867,0,0,43.122319,43.122319,18.327706,47.088742,0,0,11.960929,11.960929,0,0,217.243811,0
-c8,0,109.549158,0,0,23.915213,0,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,313.013738,0
-c9,0,0,41.660867,0,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,257.959131,0
-c10,0,109.549158,41.660867,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,350.708182,0
-c11,0,0,41.660867,0,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,257.959131,0
-c12,0,0,41.660867,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,288.247766,0
-c13,0,109.549158,41.660867,0,23.915213,0,43.122319,18.327706,0,47.088742,11.960929,0,11.960929,0,0,307.585863,0
-c14,0,0,41.660867,0,0,43.122319,43.122319,18.327706,47.088742,0,0,11.960929,11.960929,0,0,217.243811,0
-c15,0,109.549158,0,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,309.047315,0
-c16,0,0,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0,317.992176,0
-c17,0,109.549158,0,0,23.915213,0,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,313.013738,0
-c18,0,109.549158,41.660867,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,350.708182,0
-c19,0,109.549158,41.660867,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0,0,362.669111,0
-c20,0,0,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0,0,314.025753,0
-c21,0,109.549158,41.660867,0,23.915213,0,0,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,311.552286,0
-c22,0,0,41.660867,0,0,43.122319,43.122319,18.327706,47.088742,0,0,11.960929,11.960929,0,0,217.243811,0
-c23,0,0,41.660867,60.905800,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,11.960929,0,0,314.025753,0
-c24,0,0,41.660867,0,0,0,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,190.921599,0
-c25,0,109.549158,0,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,309.047315,0
-c26,0,109.549158,0,0,23.915213,0,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,313.013738,0
-c27,0,0,41.660867,60.905800,23.915213,43.122319,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0,317.992176,0
-c28,0,0,41.660867,0,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0,0,257.959131,0
-c29,0,0,41.660867,0,23.915213,43.122319,43.122319,18.327706,47.088742,47.088742,11.960929,0,11.960929,0,0,288.247766,0
-c30,0,109.549158,41.660867,0,23.915213,43.122319,43.122319,18.327706,0,47.088742,11.960929,11.960929,0,0,0,350.708182,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,9.597736,0,18.163317,8.429972,19.033555,19.033555,0,0,4.848960,0,0,79.107095,0
-c2,0,32.936441,0,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,116.022258,0
-c3,0,32.936441,13.739047,0,9.597736,0,18.163317,8.429972,0,19.033555,4.848960,0,4.848960,0,0,111.597988,0
-c4,0,0,13.739047,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,115.858419,0
-c5,0,0,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0,0,123.556732,0
-c6,0,0,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0,124.42697,0
-c7,0,0,13.739047,0,0,18.163317,18.163317,8.429972,19.033555,0,0,4.848960,4.848960,0,0,87.227128,0
-c8,0,32.936441,0,0,9.597736,0,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,116.892496,0
-c9,0,0,13.739047,0,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,102.579487,0
-c10,0,32.936441,13.739047,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,129.761305,0
-c11,0,0,13.739047,0,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,102.579487,0
-c12,0,0,13.739047,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,115.858419,0
-c13,0,32.936441,13.739047,0,9.597736,0,18.163317,8.429972,0,19.033555,4.848960,0,4.848960,0,0,111.597988,0
-c14,0,0,13.739047,0,0,18.163317,18.163317,8.429972,19.033555,0,0,4.848960,4.848960,0,0,87.227128,0
-c15,0,32.936441,0,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,116.022258,0
-c16,0,0,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0,124.42697,0
-c17,0,32.936441,0,0,9.597736,0,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,116.892496,0
-c18,0,32.936441,13.739047,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,129.761305,0
-c19,0,32.936441,13.739047,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0,0,134.610265,0
-c20,0,0,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0,0,123.556732,0
-c21,0,32.936441,13.739047,0,9.597736,0,0,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,112.468226,0
-c22,0,0,13.739047,0,0,18.163317,18.163317,8.429972,19.033555,0,0,4.848960,4.848960,0,0,87.227128,0
-c23,0,0,13.739047,21.882908,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,4.848960,0,0,123.556732,0
-c24,0,0,13.739047,0,0,0,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,74.818434,0
-c25,0,32.936441,0,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,116.022258,0
-c26,0,32.936441,0,0,9.597736,0,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,116.892496,0
-c27,0,0,13.739047,21.882908,9.597736,18.163317,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0,124.42697,0
-c28,0,0,13.739047,0,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0,0,102.579487,0
-c29,0,0,13.739047,0,9.597736,18.163317,18.163317,8.429972,19.033555,19.033555,4.848960,0,4.848960,0,0,115.858419,0
-c30,0,32.936441,13.739047,0,9.597736,18.163317,18.163317,8.429972,0,19.033555,4.848960,4.848960,0,0,0,129.761305,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c5,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c6,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c7,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c12,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c23,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c24,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c28,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c29,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c5,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c6,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c7,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c12,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c23,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c24,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c28,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c29,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1720.485,426.5549,652.9992,51.115816,544.1687,100.738165,49.948052,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,4552.91533,2.08754638739248
-c2,352.4661,123.468921,426.5549,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2759.423203,3.44435090754903
-c3,352.4661,123.468921,59.178279,652.9992,51.115816,544.1687,100.738165,49.948052,527.54432,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2831.694387,3.3564433598447
-c4,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,3559.805326,2.66992743158991
-c5,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3256.259239,2.91881609187762
-c6,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3320.348369,2.8624773112632
-c7,352.4661,1720.485,59.178279,652.9992,333.7788,100.738165,100.738165,49.948052,102.069715,543.5573,208.37216,26.069312,26.069312,7.640322,1.669613,4285.779495,2.21766470891044
-c8,352.4661,123.468921,426.5549,652.9992,51.115816,544.1687,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2773.596403,3.42675012306936
-c9,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,3982.844692,2.38634007960609
-c10,352.4661,123.468921,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2392.046582,3.97334309130343
-c11,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,3982.844692,2.38634007960609
-c12,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,3559.805326,2.66992743158991
-c13,352.4661,123.468921,59.178279,652.9992,51.115816,544.1687,100.738165,49.948052,527.54432,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2831.694387,3.3564433598447
-c14,352.4661,1720.485,59.178279,652.9992,333.7788,100.738165,100.738165,49.948052,102.069715,543.5573,208.37216,26.069312,26.069312,7.640322,1.669613,4285.779495,2.21766470891044
-c15,352.4661,123.468921,426.5549,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2759.423203,3.44435090754903
-c16,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3320.348369,2.8624773112632
-c17,352.4661,123.468921,426.5549,652.9992,51.115816,544.1687,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2773.596403,3.42675012306936
-c18,352.4661,123.468921,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2392.046582,3.97334309130343
-c19,352.4661,123.468921,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,2207.784604,4.30495878551027
-c20,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3256.259239,2.91881609187762
-c21,352.4661,123.468921,59.178279,652.9992,51.115816,544.1687,590.3019,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2895.783517,3.28215896457294
-c22,352.4661,1720.485,59.178279,652.9992,333.7788,100.738165,100.738165,49.948052,102.069715,543.5573,208.37216,26.069312,26.069312,7.640322,1.669613,4285.779495,2.21766470891044
-c23,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3256.259239,2.91881609187762
-c24,352.4661,1720.485,59.178279,652.9992,333.7788,544.1687,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,4708.938211,2.01837899124689
-c25,352.4661,123.468921,426.5549,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2759.423203,3.44435090754903
-c26,352.4661,123.468921,426.5549,652.9992,51.115816,544.1687,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,2773.596403,3.42675012306936
-c27,352.4661,1720.485,59.178279,104.457756,51.115816,100.738165,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,3320.348369,2.8624773112632
-c28,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,3982.844692,2.38634007960609
-c29,352.4661,1720.485,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,102.069715,102.069715,26.069312,206.54856,26.069312,7.640322,1.669613,3559.805326,2.66992743158991
-c30,352.4661,123.468921,59.178279,652.9992,51.115816,100.738165,100.738165,49.948052,527.54432,102.069715,26.069312,26.069312,210.33129,7.640322,1.669613,2392.046582,3.97334309130343
-c19,2207.784604
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_tensors.txt b/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_tensors.txt
deleted file mode 100644
index 6c6b42b93c0446c298489429261592fe99e2f81b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar10/vgg16_tensors.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-#Conv1,3
-Conv1,163.415,464.69,310.228,694.841,7.62006,16.0868,83.4767,247.452
-Add1,113.913,376.561,69.7114,211.862,314.261,894.668,83.2799,265.625
-Relu1,75.1381,267.733,63.043,222.775,116.511,379.888,83.4086,300.518
-#Conv2,4
-Conv2,1186.37,5295.79,1399.51,5412.29,109.635,397.898,83.3307,436.945
-Add2,114.041,707.602,69.1905,303.338,336.556,1525.29,83.1178,365.325
-Relu2,74.295,455.493,62.4233,282.215,106.67,463.637,83.0734,372.529
-Pool1,175.393,1012.62,79.7262,360.302,106.496,467.712,20.9488,92.2916
-#Conv3,3
-Conv3,613.82,3267.16,354.47,1585.56,28.5399,124.01,41.6322,203.92
-Add3,51.2818,306.368,40.9358,206.435,52.9835,255.846,41.6412,204.32
-Relu3,36.6237,218.815,31.1491,157.975,53.9219,262.044,41.4745,201.753
-#Conv4,4
-Conv4,982.846,5808.36,543.421,2780.23,52.8973,253.818,41.5238,236.022
-Add4,51.329,344.524,37.6205,200.601,146.069,779.141,41.3499,213.133
-Relu4,36.7139,244.573,31.1126,165.225,52.5946,268.222,41.3761,210.186
-Pool2,88.7044,575.997,40.8451,210.857,52.6109,263.231,10.5892,50.2118
-#Conv5,3
-Conv5,491.91,3047.42,278.988,1427.88,14.8008,70.9043,20.8418,110.25
-Add5,48.2779,312.538,39.1711,216.321,27.4739,144.775,20.8433,109.029
-Relu5,18.3751,115.933,15.6197,87.4008,27.1876,142.132,20.7564,107.244
-#Conv6,3
-Conv6,858.952,5554.16,490.314,2738.69,28.103,144.6,20.8654,122.714
-Add6,48.3038,331.31,38.2773,222.684,78.3804,453.985,20.7551,113.98
-Relu6,18.3515,122.286,15.5774,91.8805,26.9604,148.182,20.7925,111.946
-#Conv7,4
-Conv7,859.425,5760.38,514.632,2922.64,28.2053,152.501,20.8436,124.457
-Add7,48.1751,337.048,38.3104,225.263,81.46,477.128,20.7616,115.225
-Relu7,18.3889,125.084,15.5863,93.2475,27.0202,149.73,20.7296,113.484
-Pool3,45.2833,307.898,21.7732,125.807,27.105,148.004,5.37692,25.667
-#Conv8,3
-Conv8,434.046,2940.43,240.716,1378.49,9.68469,49.5859,10.5597,59.0058
-Add8,46.6813,323.76,42.0846,253.744,14.6636,82.9186,10.5141,57.5583
-Relu8,9.34284,60.6176,7.88397,50.8594,14.4739,80.2487,10.4677,57.0219
-#Conv9,3
-Conv9,801.893,5651.89,477.878,2885.71,16.2347,90.1067,10.5851,63.9065
-Add9,46.6775,338.552,41.8259,265.047,39.8499,247.783,10.4589,60.1816
-Relu9,9.34283,63.4928,7.84042,52.587,14.2726,83.3662,10.4563,59.3499
-#Conv10,4
-Conv10,802.661,5895.34,481.652,3018.22,16.267,94.4725,10.5438,65.7062
-Add10,46.7852,353.064,41.8232,272.933,40.0563,257.396,10.5152,61.7303
-Relu10,9.31493,65.6928,7.8553,54.9037,14.2674,85.1708,10.4698,61.6665
-Pool4,24.6055,179.513,12.2268,81.0192,14.3183,85.3146,2.76689,12.3171
-#Conv11,3
-Conv11,308.859,2238.17,173.774,1077.96,6.79928,37.3658,2.77964,12.8278
-Add11,32.7593,228.974,32.499,205.175,4.84751,25.3885,2.71712,12.3719
-Relu11,2.49349,12.8041,2.09916,16.791,4.48398,23.4935,2.65339,11.6345
-#Conv12,3
-Conv12,308.957,2143.29,172.103,1054.21,6.35668,34.3,2.76929,12.2454
-Add12,32.7648,220.92,32.3458,201.907,4.88662,25.364,2.69777,11.7633
-Relu12,2.50068,12.3579,2.09976,17.1837,4.44727,22.6715,2.64916,11.3108
-#Conv13,4
-Conv13,308.526,2076.4,171.684,1041.08,6.20782,33.1375,2.76826,12.0417
-Add13,32.723,215.055,32.2976,199.983,4.91853,25.4026,2.71496,12.0093
-Relu13,2.48762,12.2284,2.10382,16.7344,4.36041,22.0096,2.72796,11.4339
-Pool5,8.20681,50.2013,4.24587,28.8535,4.30952,21.6535,0.789328,1.12606
-#FC1,3
-Mul1,10.6715,65.8927,5.49296,37.0659,2.18449,8.22985,0.810781,1.37103
-Add14,0.902452,1.91287,0.999932,9.47008,1.88605,7.19323,0.760782,0.960605
-Relu14,0.846016,1.57242,1.14743,10.6248,1.5964,5.26066,0.762322,0.9319
-#FC2,2
-Mul2,0.821147,1.66598,1.31517,8.1931,1.76803,6.49192,0.133188,0.0183427
-Add15,0.226743,0.146973,0.354443,1.81062,0.764484,1.36953,0.120211,0.0438951
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs1.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs1.txt
deleted file mode 100644
index 2c88b81aaa9620b01f75897e5e082dd78c1d3d57..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs1.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,7,7,8 8 8,8 8 8 8,7,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,7,7,8 8 8,8 8 8 8,7,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,7,7,8 8 8,8 8 8 8,7,7,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,8 8 8,7,8 8 8 8,8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs2.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs2.txt
deleted file mode 100644
index 8dacc6e3fe910098503d504feece0b5ecd1753dc..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_confs2.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,7,8 8 8,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,7,8 8 8,8 8 8,8 8 8 8,6,6,7,8 8 8,7,6,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,7,8 8 8,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,7,8 8 8,8 8 8,8 8 8 8,6,6,7,8 8 8,7,6,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,7,8 8 8,7,8 8 8,8 8 8,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,7,8 8 8,8 8 8,8 8 8 8,6,6,7,8 8 8,7,6,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,7,7,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp16.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp16.csv
deleted file mode 100644
index 04c4cfc4efb2b0fe6f94ddc332d356ba2966da72..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp16.csv
+++ /dev/null
@@ -1,148 +0,0 @@
-Add1,69.7114,211.862,107.8,104.062,6085.83,3099.48,2986.34,49.2963,152.515,80.4532,73.6474,4385.91,2317.11,2113.98
-Add10,41.8232,272.933,216.003,56.9294,13052.8,10330.2,2722.54,29.5763,193.08,152.806,40.2755,9233.85,7307.84,1926.04
-Add10_f2h,40.0563,257.396,205.254,52.142,12850.3,10247,2603.29,28.3409,182.148,145.252,36.8963,9087.18,7246.3,1840.92
-Add10_h2f,10.5152,61.7303,48.6618,13.0686,11780.8,9286.75,2494.08,7.45143,43.7235,34.4675,9.2562,8348.86,6581.42,1767.47
-Add11,32.499,205.175,160.15,45.0253,12627.7,9856.71,2770.94,22.9909,145.16,113.305,31.856,8930.63,6971.08,1959.62
-Add11_f2h,4.84751,25.3885,19.8298,5.55869,10456,8166.49,2289.46,3.43461,18.1215,14.1539,3.96771,7436.9,5808.46,1628.52
-Add11_h2f,2.71712,12.3719,9.65008,2.72179,9106.03,7102.66,2003.36,1.92229,8.92178,6.95893,1.96292,6563.12,5119.11,1444.06
-Add12,32.3458,201.907,158.806,43.1007,12485.6,9820.54,2665.04,22.8734,142.79,112.309,30.483,8830.29,6945.65,1884.77
-Add12_f2h,4.88662,25.364,19.9643,5.39974,10378.2,8169.14,2209.1,3.45883,18.0489,14.2063,3.84293,7376.52,5806.53,1570.11
-Add12_h2f,2.69777,11.7633,9.24658,2.51676,8720.78,6854.97,1865.81,1.90775,8.49592,6.67815,1.81791,6297.92,4950.42,1347.6
-Add13,32.2976,199.983,158.214,41.7697,12384.7,9798.1,2586.56,22.8397,141.436,111.896,29.5422,8758.82,6929.74,1829.23
-Add13_f2h,4.91853,25.4026,20.1163,5.28634,10314.5,8167.97,2146.57,3.50657,18.2597,14.4609,3.79914,7330.34,5804.95,1525.56
-Add13_h2f,2.71496,12.0093,9.49625,2.51304,8840.31,6990.41,1849.9,1.92046,8.68342,6.8666,1.81698,6383.37,5047.78,1335.71
-Add14,0.999932,9.47008,7.42388,2.0462,18954.6,14861.4,4093.23,0.708467,7.11012,5.57305,1.53723,14214.1,11144.8,3069.6
-Add14_f2h,1.88605,7.19323,5.64536,1.54786,7493.19,5878.86,1614.33,1.34509,5.54955,4.35727,1.1924,5686.64,4462.19,1224.59
-Add14_h2f,0.760782,0.960605,0.753025,0.20758,2523.02,1977.84,545.18,0.538123,1.10915,0.869399,0.23978,2911.43,2282.18,629.323
-Add15,0.354443,1.81062,1.41722,0.393395,10528.4,8243.7,2284.66,0.255481,2.29175,1.79496,0.496829,13969.3,10946.3,3023.19
-Add15_f2h,0.764484,1.36953,1.07351,0.296021,3102.43,2430.16,672.268,0.572894,1.59231,1.24879,0.343541,3452.8,2704.95,747.932
-Add15_h2f,0.120211,0.0438951,0.0343584,0.00953666,722.085,565.205,156.881,0.0858258,0.0897805,0.0702545,0.0195276,1484.29,1161.5,322.825
-Add1_f2h,314.261,894.668,472.564,422.104,5684.96,2999.37,2685.59,222.29,654.332,365.302,299.156,4151.15,2314.09,1902.21
-Add1_h2f,83.2799,265.625,135.314,130.311,6379.44,3249.94,3129.5,58.8888,190.41,99.6739,92.2071,4573.2,2394.11,2214.4
-Add2,69.1905,303.338,199.548,103.79,8768.52,5768.33,3000.19,48.9274,214.509,141.116,73.3959,6200.7,4079.22,2121.55
-Add2_f2h,336.556,1525.29,1077.85,447.443,9065.06,6405.97,2659.09,238.064,1078.88,762.399,316.488,6410.37,4530.13,1880.3
-Add2_h2f,83.1178,365.325,234.29,131.036,8790.52,5637.51,3153.01,58.7743,258.343,165.686,92.6601,6216.16,3986.69,2229.56
-Add3,40.9358,206.435,132.407,74.0277,10086.1,6469.28,3616.86,28.9503,146.001,93.6465,52.3555,7132.63,4575.01,2557.68
-Add3_f2h,52.9835,255.846,166.622,89.2241,9657.91,6289.83,3368.08,37.4671,180.924,117.829,63.0955,6829.52,4447.85,2381.69
-Add3_h2f,41.6412,204.32,129.501,74.8188,9814.92,6220.84,3594.07,29.4469,144.508,91.593,52.9164,6942.13,4400.1,2542.08
-Add4,37.6205,200.601,138.934,61.6676,10664.8,7386.36,3278.47,26.6043,141.874,98.2614,43.6138,7542.19,5223.74,2318.49
-Add4_f2h,146.069,779.141,555.206,223.935,10669.7,7603.38,3066.3,103.384,551.386,392.897,158.49,7544.72,5376.51,2168.23
-Add4_h2f,41.3499,213.133,145.707,67.426,10308.5,7047.34,3261.17,29.2407,150.732,103.047,47.6846,7289.72,4983.6,2306.13
-Add5,39.1711,216.321,149.235,67.0856,11045,7619.75,3425.3,27.7004,152.99,105.545,47.445,7810.9,5388.61,2422.31
-Add5_f2h,27.4739,144.775,100.707,44.0675,10539.5,7331.45,3208.09,19.4282,102.386,71.2208,31.165,7453.51,5184.78,2268.74
-Add5_h2f,20.8433,109.029,74.8169,34.2117,10461.4,7178.79,3282.61,14.7445,77.147,52.9395,24.2078,7398.96,5077.34,2321.65
-Add6,38.2773,222.684,163.353,59.3305,11635.7,8535.58,3100.14,27.0679,157.484,115.525,41.9589,8228.62,6036.27,2192.36
-Add6_f2h,78.3804,453.985,337.891,116.094,11585.2,8622.75,2962.45,55.4654,321.237,239.086,82.1516,8192.13,6097.33,2094.81
-Add6_h2f,20.7551,113.98,83.0303,30.9501,10983.3,8000.93,2982.38,14.6773,80.6183,58.7275,21.891,7767.8,5658.58,2109.24
-Add7,38.3104,225.263,168.705,56.5575,11759.8,8807.22,2952.55,27.0935,159.323,119.322,40.0017,8316.1,6228.2,2087.92
-Add7_f2h,81.46,477.128,362.784,114.344,11714.6,8907.2,2807.39,57.624,337.519,256.632,80.8876,8283.65,6298.49,1985.17
-Add7_h2f,20.7616,115.225,85.6469,29.5778,11099.7,8250.44,2849.26,14.6822,81.4998,60.5791,20.9209,7850.09,5834.99,2015.11
-Add8,42.0846,253.744,188.878,64.8666,12058.8,8976.12,3082.68,29.7613,179.461,133.584,45.8767,8527.74,6347.77,2180
-Add8_f2h,14.6636,82.9186,61.9626,20.956,11309.1,8451,2858.12,10.3711,58.6733,43.8445,14.8289,8000.2,5978.33,2021.89
-Add8_h2f,10.5141,57.5583,42.7041,14.8542,10948.7,8123.2,2825.55,7.43528,40.7375,30.2242,10.5133,7748.37,5748.74,1999.65
-Add9,41.8259,265.047,205.792,59.2543,12674.6,9841.07,2833.5,29.5785,187.445,145.539,41.9062,8963.23,6959.46,2003.8
-Add9_f2h,39.8499,247.783,193.75,54.033,12435.2,9723.35,2711.83,28.1894,175.299,137.075,38.2246,8793.58,6875.93,1917.67
-Add9_h2f,10.4589,60.1816,46.5522,13.6294,11508.3,8902.04,2606.31,7.39612,42.594,32.9479,9.64626,8144.6,6300.12,1844.51
-Conv1,310.228,694.841,342.376,352.465,5137.93,2796.38,2341.55,227.742,502.069,283.327,253.153,4000.56,2481.3,1665.08
-Conv10,481.652,3018.22,2355.36,662.857,12534.5,9781.99,2752.5,340.625,2134.28,1665.53,468.767,8863.57,6917.32,1946.32
-Conv10_f2h,16.267,94.4725,71.7156,22.7569,11613.2,8815.27,2797.92,11.5068,66.8653,50.7629,16.1033,8215,6235.99,1979.12
-Conv10_h2f,10.5438,65.7062,52.8554,12.8508,12463.8,10026.1,2437.66,7.45594,46.5025,37.4079,9.09478,8820.88,7095.76,1725.14
-Conv11,173.774,1077.96,835.557,242.405,12410.5,9620.47,2790,122.957,762.523,591.018,171.513,8775.98,6803.22,1972.86
-Conv11_f2h,6.79928,37.3658,28.7179,8.64791,10989.1,8446.21,2542.86,4.81142,26.5033,20.3687,6.13521,7787.5,5985.57,1802.09
-Conv11_h2f,2.77964,12.8278,10.0359,2.79191,9229.29,7220.6,2008.69,1.96619,9.21135,7.20639,2.00505,6624.71,5182.79,1441.97
-Conv12,172.103,1054.21,824.356,229.857,12254.7,9583.51,2671.18,121.754,745.623,583.022,162.614,8665.94,6777.23,1888.86
-Conv12_f2h,6.35668,34.3,26.5952,7.70476,10790.7,8366.52,2424.14,4.49979,24.3436,18.8767,5.46737,7649.53,5931.31,1718.37
-Conv12_h2f,2.76929,12.2454,9.6556,2.58978,8837.54,6968.57,1868.97,1.95863,8.84718,6.97582,1.87148,6378.23,5029.25,1349.07
-Conv13,171.684,1041.08,819.652,221.43,12131.7,9552.2,2579.48,121.445,736.263,579.64,156.639,8579.07,6755.25,1824.02
-Conv13_f2h,6.20782,33.1375,25.8857,7.25183,10669.3,8333.91,2335.41,4.39486,23.5409,18.3911,5.15048,7566.21,5910.3,1656.12
-Conv13_h2f,2.76826,12.0417,9.55355,2.48819,8700.26,6902.56,1797.7,1.95823,8.68074,6.88708,1.79382,6270.3,4974.74,1295.67
-Conv1_f2h,7.62006,16.0868,8.21342,7.87343,4543.5,2445.6,2097.91,5.45871,12.1698,7.31434,5.59711,3581.38,2233.61,1494.73
-Conv1_h2f,83.4767,247.452,143.632,103.82,5928.39,3441,2487.39,59.0279,183.614,113.023,73.667,4398.72,2707.49,1764.94
-Conv2,1399.51,5412.29,3378.78,2033.51,7735.19,4829.01,2906.18,989.654,3830.09,2393.05,1438.02,5474.06,3420.35,2055.12
-Conv2_f2h,109.635,397.898,194.591,203.307,7252.79,3544.58,3708.21,77.5331,282.821,139.764,143.829,5150.57,2542.09,2622.66
-Conv2_h2f,83.3307,436.945,336.643,100.302,10486.9,8079.59,2407.3,58.9247,308.997,238.072,70.9279,7415.87,5713.65,1702.27
-Conv3,354.47,1585.56,948.24,637.318,8947.03,5350.96,3596.07,250.683,1121.23,670.544,450.697,6326.72,3783.93,2542.83
-Conv3_f2h,28.5399,124.01,71.14,52.8695,8689.56,4984.75,3704.81,20.1903,87.7504,50.3439,37.4082,6145.43,3525.53,2620.02
-Conv3_h2f,41.6322,203.92,135.55,68.3698,9796.09,6511.67,3284.41,29.4395,144.212,95.8623,48.3504,6927.44,4604.9,2322.58
-Conv4,543.421,2780.23,1853.46,926.763,10232.8,6821.89,3410.92,384.277,1965.96,1310.62,655.348,7235.83,4823.95,2411.91
-Conv4_f2h,52.8973,253.818,152.989,100.829,9599.81,5786.36,3813.45,37.4134,179.493,108.19,71.3037,6789.28,4092.37,2696.95
-Conv4_h2f,41.5238,236.022,174.197,61.8248,11368,8390.24,2977.78,29.3628,166.909,123.187,43.7213,8038.87,5933.13,2105.74
-Conv5,278.988,1427.88,945.3,482.585,10236.5,6776.91,3459.59,197.282,1009.7,668.449,341.252,7238.45,4792.15,2446.32
-Conv5_f2h,14.8008,70.9043,45.1842,25.7201,9581.37,6105.87,3475.5,10.4668,50.1662,31.9688,18.1977,6778.52,4319.78,2458.78
-Conv5_h2f,20.8418,110.25,77.5339,32.7165,10579.5,7440.09,3139.45,14.7375,78.0695,54.9032,23.1665,7491.32,5268.35,2222.99
-Conv6,490.314,2738.69,1950.74,787.949,11171.6,7957.5,3214.1,346.719,1936.58,1379.4,557.184,7899.63,5626.93,2272.73
-Conv6_f2h,28.103,144.6,96.262,48.3381,10291.3,6851.06,3440.28,19.8745,102.268,68.0815,34.1867,7277.97,4845.06,2432.93
-Conv6_h2f,20.8654,122.714,92.9633,29.7508,11762.4,8910.71,2851.66,14.7546,86.7919,65.7503,21.0417,8318.79,6302.01,2016.79
-Conv7,514.632,2922.64,2150.64,772,11358.7,8358.43,3000.25,363.913,2066.64,1520.74,545.901,8031.96,5910.47,2121.51
-Conv7_f2h,28.2053,152.501,107.622,44.8785,10813.7,7631.44,3182.25,19.9546,107.904,76.1494,31.7549,7647.43,5396.98,2250.48
-Conv7_h2f,20.8436,124.457,96.3593,28.0977,11942.1,9246.03,2696.05,14.7391,88.0253,68.1525,19.8729,8446.1,6539.32,1906.8
-Conv8,240.716,1378.49,1006.49,371.999,11454.2,8363.32,3090.86,170.247,974.885,711.794,263.094,8099.49,5913.93,2185.59
-Conv8_f2h,9.68469,49.5859,35.2796,14.3062,10241.5,7287.22,2954.3,6.87516,35.2358,25.0675,10.1686,7251.09,5159.52,2091.63
-Conv8_h2f,10.5597,59.0058,44.3452,14.6605,11175.6,8398.89,2776.67,7.46752,41.7631,31.3867,10.3764,7908.98,5943.95,1965.05
-Conv9,477.878,2885.71,2189.28,696.429,12080.4,9165.65,2914.72,338.05,2040.9,1548.28,492.646,8542.53,6481.6,2061.04
-Conv9_f2h,16.2347,90.1067,65.6407,24.466,11099.8,8085.77,3014.08,11.4848,63.772,46.458,17.3143,7851.77,5719.72,2132.09
-Conv9_h2f,10.5851,63.9065,50.4134,13.4931,12075.4,9525.82,2549.57,7.48625,45.2319,35.6818,9.55026,8545.5,6741.23,1804.29
-Mul1,5.49296,37.0659,29.09,7.97593,13530.7,10624,2906.69,3.93147,26.5587,20.8353,5.72401,9606.57,7543.24,2063.55
-Mul1_f2h,2.18449,8.22985,6.46583,1.76402,7507.4,5897.26,1610.14,1.54872,6.15957,4.84093,1.31877,5588.96,4391.09,1197.99
-Mul1_h2f,0.810781,1.37103,1.07631,0.294716,3393.71,2664.12,729.59,0.574493,1.51826,1.19191,0.326392,3760.1,2951.69,808.488
-Mul2,1.31517,8.1931,6.40985,1.78324,12447.5,9739.56,2707.96,0.931548,6.00801,4.69986,1.30826,9120.64,7136.62,1984.19
-Mul2_f2h,1.76803,6.49192,5.08483,1.40709,7151.61,5599.64,1551.97,1.26447,5.09282,3.99098,1.10192,5498.59,4306.31,1192.38
-Mul2_h2f,0.133188,0.0183427,0.0143504,0.0039923,261.068,204.28,56.7875,0.0949979,0.06404,0.0500845,0.013956,879.603,688.197,191.411
-Pool1,79.7262,360.302,206.802,153.501,9038.84,5188.08,3850.75,56.3789,254.792,146.244,108.551,6391.73,3668.8,2722.98
-Pool1_f2h,106.496,467.712,270.25,197.462,8783.74,5075.34,3708.4,75.3091,330.755,191.123,139.636,6211.29,3589.1,2622.27
-Pool1_h2f,20.9488,92.2916,53.56,38.7316,8810.98,5113.32,3697.66,14.8149,65.2847,37.8887,27.3971,6231.7,3616.65,2615.16
-Pool2,40.8451,210.857,135.193,75.6642,10325.2,6620.16,3705.01,28.8847,149.127,95.6146,53.5134,7301.99,4681.87,2620.15
-Pool2_f2h,52.6109,263.231,170.165,93.0664,10007,6469.01,3537.99,37.2057,186.157,120.341,65.8167,7076.35,4574.53,2501.84
-Pool2_h2f,10.5892,50.2118,32.2214,17.9904,9482.45,6084.99,3397.46,7.49375,35.5699,22.8258,12.7444,6710.71,4306.37,2404.39
-Pool3,21.7732,125.807,90.0357,35.7709,11556.7,8270.76,3285.91,15.3981,89.0019,63.696,25.3063,8175.07,5850.69,2324.42
-Pool3_f2h,27.105,148.004,106.551,41.4535,10921,7862.21,3058.75,19.1678,104.677,75.3585,29.3184,7723.33,5560.2,2163.15
-Pool3_h2f,5.37692,25.667,18.3482,7.31879,9547.33,6824.97,2722.36,3.80213,18.2207,13.0249,5.19588,6777.51,4844.84,1932.7
-Pool4,12.2268,81.0192,62.5832,18.436,13254.2,10238.3,3015.89,8.64895,57.3698,44.3151,13.055,9382.73,7247.8,2134.97
-Pool4_f2h,14.3183,85.3146,66.1354,19.1792,11917,9238.05,2678.99,10.1261,60.363,46.7931,13.5702,8430.6,6535.39,1895.24
-Pool4_h2f,2.76689,12.3171,9.50437,2.81277,8901.99,6869.12,2032.88,1.95715,8.87091,6.84512,2.02583,6408.16,4944.77,1463.42
-Pool5,4.24587,28.8535,22.6878,6.16569,13613.1,10704.3,2908.83,3.01182,20.5632,16.1689,4.39464,9690.26,7619.75,2070.69
-Pool5_f2h,4.30952,21.6535,17.0404,4.6131,10038.8,7899.74,2139.08,3.05175,15.4491,12.159,3.29042,7146.27,5623.84,1522.54
-Pool5_h2f,0.789328,1.12606,0.885458,0.240603,2854.59,2244.62,609.971,0.558354,1.28688,1.01189,0.27502,3258.62,2562.17,696.513
-Relu1,63.043,222.775,109.755,113.019,7068.58,3483,3585.58,44.5829,158.651,79.4284,79.9495,5034.27,2521.06,2536.23
-Relu10,7.8553,54.9037,42.8846,12.0191,13979.1,10918.9,3060.23,5.55636,38.9249,30.404,8.52113,9908.04,7739.05,2169.03
-Relu10_f2h,14.2674,85.1708,66.8096,18.3612,11939.8,9365.82,2573.95,10.0908,60.2697,47.2767,12.9932,8447.54,6626.47,1821.1
-Relu10_h2f,10.4698,61.6665,48.0288,13.6376,11779.2,9174.16,2605.03,7.40475,43.6532,33.9994,9.654,8336.17,6492.58,1843.64
-Relu11,2.09916,16.791,13.0701,3.72095,15998.3,12453,3545.28,1.48462,12.0634,9.39021,2.67327,11491.9,8945.38,2546.63
-Relu11_f2h,4.48398,23.4935,18.3062,5.18733,10483.3,8168.53,2314.74,3.17253,16.7097,13.0203,3.68949,7455.03,5808.96,1646.13
-Relu11_h2f,2.65339,11.6345,9.04902,2.58551,8767.81,6819.4,1948.41,1.87634,8.37685,6.51523,1.86173,6311.27,4908.75,1402.6
-Relu12,2.09976,17.1837,13.475,3.70873,16368.8,12836,3532.8,1.48516,12.3639,9.69591,2.66821,11777.3,9235.93,2541.54
-Relu12_f2h,4.44727,22.6715,17.7981,4.87339,10189.2,7998.76,2190.47,3.14699,16.1439,12.6738,3.47034,7246.5,5688.6,1558.02
-Relu12_h2f,2.64916,11.3108,8.86276,2.44801,8539.6,6691.38,1848.22,1.87333,8.17249,6.4039,1.76875,6170.04,4834.82,1335.34
-Relu13,2.10382,16.7344,13.1973,3.53714,15914.8,12551.1,3363.76,1.48869,12.0067,9.46927,2.53767,11417,9004.38,2412.85
-Relu13_f2h,4.36041,22.0096,17.3783,4.63133,10092.1,7968.08,2123.99,3.0874,15.6907,12.3898,3.30126,7183.38,5671.67,1511.87
-Relu13_h2f,2.72796,11.4339,9.01109,2.4228,8560.24,6746.45,1813.78,2.03488,8.23175,6.48774,1.74417,6172.8,4865.11,1307.81
-Relu14,1.14743,10.6248,8.32102,2.30382,18530.7,14512.9,4017.87,0.811516,7.89588,6.18393,1.71212,13778.4,10791.3,2987.38
-Relu14_f2h,1.5964,5.26066,4.12403,1.13663,6435.18,5042.95,1392.23,1.14442,4.24048,3.32542,0.91515,5108.03,4003.08,1105.06
-Relu14_h2f,0.762322,0.9319,0.729587,0.202312,2419.48,1894.24,525.243,0.539977,1.14939,0.899736,0.249678,2965.98,2321.79,644.251
-Relu1_f2h,116.511,379.888,188.365,191.522,6536.78,3247.4,3289.38,82.4051,270.74,136.938,135.452,4668.45,2369.38,2327.06
-Relu1_h2f,83.4086,300.518,148.77,151.748,7206.56,3567.67,3638.89,58.9804,213.679,107.062,107.336,5124.37,2567.66,2573.96
-Relu2,62.4233,282.215,169.665,112.55,9042.07,5436.06,3606.01,44.1442,199.587,119.994,79.596,6394.13,3844.28,2549.94
-Relu2_f2h,106.67,463.637,286.355,177.281,8692.96,5369.01,3323.95,75.4328,327.879,202.516,125.367,6147.13,3796.78,2350.42
-Relu2_h2f,83.0734,372.529,221.009,151.52,8968.66,5320.81,3647.85,58.7425,263.431,156.291,107.144,6342.03,3762.66,2579.46
-Relu3,31.1491,157.975,97.2597,60.7158,10143.1,6244.72,3898.34,22.0274,111.732,68.79,42.9423,7173.3,4416.4,2756.93
-Relu3_f2h,53.9219,262.044,163.271,98.7728,9719.42,6055.82,3663.6,38.1317,185.316,115.466,69.8509,6872.96,4282.31,2590.67
-Relu3_h2f,41.4745,201.753,123.336,78.4172,9731.62,5949.14,3782.48,29.3302,142.684,87.2272,55.4573,6883.19,4207.91,2675.32
-Relu4,31.1126,165.225,109.288,55.9374,10620.9,7025.18,3595.76,22.0035,116.872,77.306,39.5668,7511.45,4968.48,2543
-Relu4_f2h,52.5946,268.222,180.055,88.1672,10199.7,6846.98,3352.7,37.1932,189.687,127.336,62.3518,7212.66,4841.86,2370.81
-Relu4_h2f,41.3761,210.186,137.878,72.3072,10159.6,6664.53,3495.09,29.259,148.644,97.5098,51.1349,7184.42,4712.91,2471.53
-Relu5,15.6197,87.4008,58.9328,28.468,11190.8,7545.75,3645.04,11.048,61.8499,41.7045,20.1457,7916.78,5338.18,2578.64
-Relu5_f2h,27.1876,142.132,96.6157,45.5163,10455.9,7107.57,3348.35,19.2268,100.527,68.3345,32.1932,7394.61,5026.63,2368
-Relu5_h2f,20.7564,107.244,71.9782,35.2657,10333.7,6935.58,3398.08,14.6781,75.8517,50.9089,24.9429,7308.38,4905.11,2403.28
-Relu6,15.5774,91.8805,65.7777,26.1027,11796,8444.84,3351.21,11.0158,65.0123,46.543,18.4696,8345.51,5974.6,2370.94
-Relu6_f2h,26.9604,148.182,106.898,41.2837,10992.7,7930.13,3062.55,19.0651,104.8,75.6027,29.1976,7774.08,5608.24,2165.85
-Relu6_h2f,20.7925,111.946,79.7133,32.2323,10779.9,7676.09,3103.81,14.709,79.2691,56.4457,22.8237,7636.46,5437.77,2198.71
-Relu7,15.5863,93.2475,68.0626,25.1848,11966.2,8734.34,3231.84,11.0234,65.9937,48.1698,17.8242,8467.81,6180.86,2286.99
-Relu7_f2h,27.0202,149.73,110.21,39.5201,11082.9,8157.67,2925.21,19.1073,105.894,77.944,27.9502,7837.75,5769.07,2068.69
-Relu7_h2f,20.7296,113.484,82.3887,31.0951,10948.8,7948.82,3000.02,14.6587,80.2669,58.2735,21.9936,7743.7,5621.92,2121.8
-Relu8,7.88397,50.8594,37.3843,13.4751,12900,9482.44,3417.54,5.60997,36.5052,26.8322,9.67313,9199.71,6762.66,2437.09
-Relu8_f2h,14.4739,80.2487,59.2479,21.0008,11105.4,8199.18,2906.22,10.2403,56.8303,41.9579,14.8727,7866.72,5808.05,2058.7
-Relu8_h2f,10.4677,57.0219,41.8152,15.2067,10895,7989.54,2905.49,7.40341,40.3631,29.5989,10.7642,7710.49,5654.25,2056.24
-Relu9,7.84042,52.587,40.301,12.286,13413.6,10279.8,3133.81,5.54547,37.2786,28.5693,8.70947,9505.87,7285.1,2220.81
-Relu9_f2h,14.2726,83.3662,64.1642,19.202,11681.8,8991.1,2690.7,10.0949,58.9936,45.4054,13.5884,8264.16,6360.65,1903.53
-Relu9_h2f,10.4563,59.3499,45.3517,13.9982,11352.1,8674.64,2677.47,7.39481,42.0028,32.0961,9.90692,8032.96,6138.36,1894.63
-Softmax1,2.01231,8.10635,6.34144,1.76491,8058.61,6304.39,1754.22,1.42328,5.95817,4.66119,1.29711,5923.64,4634.63,1289.13
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp32.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp32.csv
deleted file mode 100644
index 2e203cf73d4f5220f9f3217398c952496028fb62..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_fp32.csv
+++ /dev/null
@@ -1,50 +0,0 @@
-Add1,113.913,376.561,219.636,156.925,6609.15,3854.17,2754.97,80.5497,282.75,177.622,111.325,4961.17,3115.93,1954.23
-Add10,46.7852,353.064,295.37,57.6943,15092.7,12626.5,2466.15,33.0858,249.698,208.891,40.8191,10672.6,8928.67,1744.47
-Add11,32.7593,228.974,188.206,40.768,13979,11490.1,2488.93,23.1655,161.932,133.101,28.8363,9885.52,8125.42,1760.39
-Add12,32.7648,220.92,180.753,40.1662,13485,11033.3,2451.71,23.1723,156.257,127.848,28.4139,9536.19,7802.45,1733.98
-Add13,32.723,215.055,175.489,39.5662,13144.2,10726,2418.24,23.1396,152.085,124.105,27.9836,9295.27,7585.25,1710.24
-Add14,0.902452,1.91287,1.55297,0.359903,4206.37,3415.11,791.258,0.639486,1.96075,1.59154,0.36925,4301.37,3491.69,809.769
-Add15,0.226743,0.146973,0.119291,0.0276818,1268.32,1029.69,238.626,0.161374,0.274735,0.222974,0.0517663,2347.78,1906.32,441.514
-Add2,114.041,707.602,550.188,157.413,12409.4,9648.8,2760.62,80.6401,500.915,389.6,111.35,8784.41,6832.28,1952.72
-Add3,51.2818,306.368,231.407,74.9613,11948.4,9024.9,2923.5,36.2641,216.686,163.674,53.0143,8450.22,6382.88,2067.43
-Add4,51.329,344.524,275.839,68.6848,13425,10748.6,2676.4,36.2967,243.665,195.087,48.5789,9494.83,7601.95,1892.93
-Add5,48.2779,312.538,246.161,66.377,12947.4,10197.6,2749.79,34.1397,221.022,174.082,46.9412,9155.71,7211.24,1944.51
-Add6,48.3038,331.31,269.638,61.6728,13717.7,11164.2,2553.51,34.1584,234.299,190.684,43.6161,9700.3,7894.61,1805.74
-Add7,48.1751,337.048,277.011,60.0367,13992.6,11500.2,2492.4,34.0674,238.357,195.898,42.4598,9894.64,8132.16,1762.55
-Add8,46.6813,323.76,263.597,60.1628,13871.3,11293.7,2577.64,33.0123,228.964,186.417,42.5484,9808.96,7986.22,1822.79
-Add9,46.6775,338.552,280.794,57.7577,14509.8,12034.3,2475.46,33.0125,239.488,198.63,40.8649,10264.6,8513.31,1751.56
-Conv1,163.415,464.69,270.83,193.861,5719.79,3344.04,2375.74,115.618,364.219,239.324,137.798,4496.91,2961.18,1689.91
-Conv10,802.661,5895.34,4887.39,1007.95,14689.8,12178.3,2511.49,567.577,4168.67,3455.92,712.856,10387.4,8611.5,1776.16
-Conv11,308.859,2238.17,1841.36,396.805,14494.3,11924.8,2569.46,218.426,1582.75,1302.12,280.685,10249.2,8432.33,1817.28
-Conv12,308.957,2143.29,1754.35,388.942,13875.6,11357.9,2517.72,218.489,1515.61,1240.55,275.097,9811.8,8031.51,1780.54
-Conv13,308.526,2076.4,1693.52,382.878,13461.8,10980,2481.86,218.187,1468.29,1197.52,270.808,9519.36,7764.54,1755.1
-Conv2,1186.37,5295.79,3610.95,1684.84,9029.61,6180.44,2849.17,840.943,3760.72,2581.03,1191.77,6467.54,4470.42,2016.67
-Conv3,613.82,3267.16,2333.45,933.711,10647.5,7604.92,3042.61,434.112,2311.16,1650.96,660.338,7532.26,5381.04,2151.65
-Conv4,982.846,5808.36,4413.44,1394.92,11819.9,8981.38,2838.55,695.005,4107.39,3121,986.415,8358.48,6351.33,2007.22
-Conv5,491.91,3047.42,2348.81,698.608,12391.2,9550.74,2840.5,347.871,2154.95,1660.93,494.035,8762.18,6753.64,2008.58
-Conv6,858.952,5554.16,4403.19,1150.97,12932.7,10252.8,2679.96,607.382,3927.42,3113.55,813.89,9144.98,7249.96,1895.06
-Conv7,859.425,5760.38,4659.21,1101.17,13405.5,10842.9,2562.58,607.715,4073.23,3294.58,778.676,9479.18,7667.16,1812.06
-Conv8,434.046,2940.43,2368.34,572.081,13549.6,10913.5,2636.05,306.938,2079.26,1674.71,404.559,9581.1,7717.15,1864.02
-Conv9,801.893,5651.89,4624.92,1026.97,14096.7,11535.3,2561.33,567.034,3996.52,3270.33,726.23,9967.96,8156.84,1811.22
-Mul1,10.6715,65.8927,53.5391,12.3536,12394.8,10071.9,2322.88,7.57836,46.691,37.9335,8.75846,8780.28,7134.75,1645.72
-Mul2,0.821147,1.66598,1.35236,0.313621,4076.84,3309.78,767.066,0.582562,1.7168,1.39365,0.323189,4224.31,3430.04,794.372
-Pool1,175.393,1012.62,726.774,285.844,11549.1,8289.34,3259.78,124.05,716.6,514.49,202.181,8172.74,5868.18,2305.37
-Pool2,88.7044,575.997,444.625,131.372,12987.9,10025.6,2962.21,62.7347,407.367,314.456,92.9131,9184.52,7089.77,2094.78
-Pool3,45.2833,307.898,247.595,60.3023,13598.6,10935.2,2663.36,32.0292,217.79,175.137,42.6548,9616.06,7732.71,1883.4
-Pool4,24.6055,179.513,148.419,31.0941,14591.4,12063.9,2527.45,17.4004,126.964,104.972,21.9976,10319,8531.59,1787.91
-Pool5,8.20681,50.2013,40.8424,9.35894,12226.9,9947.17,2279.71,5.82151,35.6921,29.0395,6.65352,8656.71,7042.81,1614.13
-Relu1,75.1381,267.733,149.248,118.484,7127.84,3973.96,3153.88,53.1331,197.389,117.092,83.9442,5255.76,3118.27,2234.45
-Relu10,9.31493,65.6928,54.5384,11.1544,14104.6,11709.8,2394.86,6.58686,46.4969,38.6024,7.89659,9982.66,8287.84,1695.28
-Relu11,2.49349,12.8041,10.5081,2.29598,10267.6,8426.39,1841.24,1.76354,9.25156,7.5928,1.65903,7415.31,6085.61,1329.91
-Relu12,2.50068,12.3579,10.0966,2.26132,9872.52,8066.18,1806.35,1.77005,8.95586,7.31638,1.63973,7136.08,5830.14,1306.13
-Relu13,2.48762,12.2284,9.96443,2.26399,9833.11,8012.58,1820.53,1.75929,8.83304,7.19779,1.63547,7102.38,5787.52,1315.04
-Relu14,0.846016,1.57242,1.27739,0.29503,3703.4,3008.56,694.844,0.599154,1.69071,1.37363,0.317122,3975.03,3229.61,745.515
-Relu2,74.295,455.493,338.819,116.674,12261.1,9120.32,3140.75,52.5363,322.394,239.897,82.5246,8677.47,6456.88,2221.33
-Relu3,36.6237,218.815,161.907,56.9082,11949.1,8841.42,3107.67,25.8986,154.77,114.524,40.248,8450.92,6253.36,2197.68
-Relu4,36.7139,244.573,191.979,52.5941,13329.1,10462.8,2866.31,25.9761,173.026,135.817,37.2093,9428.22,7400.79,2027.47
-Relu5,18.3751,115.933,90.1275,25.8055,12618,9809.37,2808.64,12.9942,82.009,63.7545,18.2548,8924.67,6938.12,1986.59
-Relu6,18.3515,122.286,98.2308,24.0552,13327.1,10705.5,2621.61,12.9773,86.4993,69.4836,17.016,9426.46,7572.14,1854.36
-Relu7,18.3889,125.084,101.453,23.631,13603.6,11033.6,2570.02,13.0046,88.4856,71.7689,16.7172,9621.58,7803.86,1817.78
-Relu8,9.34284,60.6176,48.9763,11.6413,12976.3,10484.3,2492.01,6.60743,42.914,34.6725,8.24179,9185.09,7421.14,1764
-Relu9,9.34283,63.4928,52.2617,11.2311,13592.4,11188.1,2404.33,6.60725,44.9455,36.9955,7.95121,9621.07,7919.28,1702.05
-Softmax1,2.09116,9.17789,7.4508,1.72709,8772.87,7122.11,1650.76,1.47902,6.75862,5.48652,1.27228,6456.75,5241.62,1215.29
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_layers.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_layers.txt
deleted file mode 100644
index af6469192145b246beaec42cf42a6629e5ed1a93..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_layers.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-Conv1,2000,3,32,32,64,3,3,3,1,1
-Conv2,2000,64,32,32,64,64,3,3,1,1
-Conv3,2000,64,16,16,128,64,3,3,1,1
-Conv4,2000,128,16,16,128,128,3,3,1,1
-Conv5,2000,128,8,8,256,128,3,3,1,1
-Conv6,2000,256,8,8,256,256,3,3,1,1
-Conv7,2000,256,8,8,256,256,3,3,1,1
-Conv8,2000,256,4,4,512,256,3,3,1,1
-Conv9,2000,512,4,4,512,512,3,3,1,1
-Conv10,2000,512,4,4,512,512,3,3,1,1
-Conv11,2000,512,2,2,512,512,3,3,1,1
-Conv12,2000,512,2,2,512,512,3,3,1,1
-Conv13,2000,512,2,2,512,512,3,3,1,1
-FC1,2000,512,512,512
-FC2,2000,512,512,10
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_ops.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_ops.txt
deleted file mode 100644
index 2075774fde3e66afd1a1946cac46b87038a6486f..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_ops.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-#Conv1,3
-Conv1
-Add1
-Relu1
-#Conv2,4
-Conv2
-Add2
-Relu2
-Pool1
-#Conv3,3
-Conv3
-Add3
-Relu3
-#Conv4,4
-Conv4
-Add4
-Relu4
-Pool2
-#Conv5,3
-Conv5
-Add5
-Relu5
-#Conv6,3
-Conv6
-Add6
-Relu6
-#Conv7,4
-Conv7
-Add7
-Relu7
-Pool3
-#Conv8,3
-Conv8
-Add8
-Relu8
-#Conv9,3
-Conv9
-Add9
-Relu9
-#Conv10,4
-Conv10
-Add10
-Relu10
-Pool4
-#Conv11,3
-Conv11
-Add11
-Relu11
-#Conv12,3
-Conv12
-Add12
-Relu12
-#Conv13,4
-Conv13
-Add13
-Relu13
-Pool5
-#FC1,3
-Mul1
-Add14
-Relu14
-#FC2,2
-Mul2
-Add15
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs1.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs1.txt
deleted file mode 100644
index e6989e16ee3869ecb13ecc9f73af7f9a66c24dee..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs1.txt
+++ /dev/null
@@ -1,33 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,7,8 8 8,5,8 8 8 8,6,7
-9 9 9,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,7,8 8 8,8 8 8,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,7,8 8 8,6,8 8 8 8,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,7,8 8 8,5,8 8 8 8,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,5,7,8 8 8,5,6,6,5
-9 9 9,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,8 8 8 8,8 8 8,8 8 8,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,8 8 8 8,8 8 8,5,5,8 8 8,5,6,8 8 8,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,7,7,8 8 8,8 8 8,7,8 8 8,5,6,8 8 8,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,8 8 8,7,8 8 8,5,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,5,5,8 8 8 8,8 8 8,8 8 8,8 8 8 8,5,5,7,7,6
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,5,5,8 8 8,5,6,5,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,6,8 8 8 8,8 8 8,8 8 8,8 8 8 8,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,8 8 8 8,8 8 8,5,7,8 8 8,5,7,6,6
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,5,7,8 8 8,5,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,8 8 8 8,8 8 8,5,5,6,7
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,6,6,8 8 8 8,8 8 8,6,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,6
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,5,8 8 8 8,8 8 8,5,6,5,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,8 8 8,7,8 8 8,8 8 8,6,6,7
-9 9 9,9 9 9 9,8 8 8,8 8 8 8,7,7,6,8 8 8,8 8 8,8 8 8 8,8 8 8,7,5,8 8 8,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,6,8 8 8 8,8 8 8,5,5,6,7
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,6,8 8 8,8 8 8 8,8 8 8,6,8 8 8 8,8 8 8,8 8 8,5,6,6
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,7,8 8 8,8 8 8 8,8 8 8,6,8 8 8 8,5,8 8 8,8 8 8 8,6,6
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,6,8 8 8,8 8 8 8,8 8 8,8 8 8,7,7,5,7,7,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,5,8 8 8,8 8 8 8,8 8 8,8 8 8,6,8 8 8,7,6,8 8 8,5
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,7,5,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,8 8 8,5,5,8 8 8,8 8 8,8 8 8 8,7,8 8 8,8 8 8 8,5,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,7,5,8 8 8,8 8 8,8 8 8 8,8 8 8,7,7,7,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,7,7,5,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,8 8 8,8 8 8 8,8 8 8,5,5,6,5
-9 9 9,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,7,8 8 8,8 8 8,8 8 8 8,8 8 8,6,6,6,7
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,8 8 8,6,8 8 8 8,8 8 8,8 8 8,7,8 8 8,5,8 8 8 8,6,7
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,7,6,8 8 8,5
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs2.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs2.txt
deleted file mode 100644
index 4732b0c143c403151aa411ec9b97007a2fc4898b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_confs2.txt
+++ /dev/null
@@ -1,52 +0,0 @@
-9 9 9,9 9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9 9,9 9 9 9,9 9 9,9 9
-9 9 9,7,8 8 8,9 9 9 9,9 9 9,5,9 9 9 9,8 8 8,8 8 8,8 8 8 8,8 8 8,5,6,9 9 9,8 8
-9 9 9,7,8 8 8,9 9 9 9,9 9 9,6,8 8 8 8,8 8 8,7,5,8 8 8,9 9 9,8 8 8 8,7,8 8
-9 9 9,7,8 8 8,9 9 9 9,9 9 9,5,8 8 8 8,8 8 8,9 9 9,8 8 8 8,8 8 8,9 9 9,7,7,8 8
-8 8 8,7,8 8 8,9 9 9 9,7,7,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,7,6,7,8 8
-8 8 8,8 8 8 8,9 9 9,7,7,7,9 9 9 9,8 8 8,5,5,6,8 8 8,9 9 9 9,5,7
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,8 8 8,8 8 8,8 8 8 8,7,5,6,7,7,7,8 8 8,8 8
-9 9 9,7,8 8 8,9 9 9 9,6,5,9 9 9 9,8 8 8,7,8 8 8 8,8 8 8,9 9 9,6,7,7
-9 9 9,9 9 9 9,8 8 8,7,7,9 9 9,9 9 9 9,9 9 9,7,7,8 8 8,6,5,9 9 9,7
-9 9 9,7,8 8 8,9 9 9 9,7,6,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,9 9 9,6,5,8 8
-9 9 9,7,8 8 8,9 9 9 9,7,6,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,9 9 9,6,7,8 8
-9 9 9,7,8 8 8,9 9 9 9,6,9 9 9,8 8 8 8,8 8 8,8 8 8,5,8 8 8,9 9 9,6,7,8 8
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,5,5,8 8 8 8,7,7,8 8 8 8,9 9 9,9 9 9,9 9 9 9,7,8 8
-9 9 9,9 9 9 9,8 8 8,7,6,9 9 9,8 8 8 8,9 9 9,7,8 8 8 8,5,9 9 9,6,7,8 8
-9 9 9,8 8 8 8,9 9 9,8 8 8 8,8 8 8,9 9 9,9 9 9 9,5,6,6,8 8 8,6,5,6,6
-8 8 8,8 8 8 8,9 9 9,8 8 8 8,5,9 9 9,7,9 9 9,7,8 8 8 8,7,6,5,7,6
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,5,6,7,8 8 8,7,5,8 8 8,6,5,5,9 9
-8 8 8,8 8 8 8,9 9 9,8 8 8 8,9 9 9,8 8 8,5,5,8 8 8,9 9 9 9,6,8 8 8,6,5,8 8
-9 9 9,7,8 8 8,9 9 9 9,7,5,8 8 8 8,8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,6,7,6
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,9 9 9,8 8 8,7,9 9 9,5,8 8 8 8,8 8 8,6,6,8 8 8,5
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,7,9 9 9,7,9 9 9,7,9 9 9 9,6,7,6,5,8 8
-9 9 9,8 8 8 8,9 9 9,8 8 8 8,9 9 9,8 8 8,5,7,7,9 9 9 9,6,8 8 8,9 9 9 9,6,6
-9 9 9,9 9 9 9,8 8 8,5,6,5,8 8 8 8,8 8 8,7,8 8 8 8,8 8 8,9 9 9,6,5,8 8
-8 8 8,8 8 8 8,8 8 8,9 9 9 9,8 8 8,8 8 8,7,7,8 8 8,8 8 8 8,8 8 8,5,6,9 9 9,8 8
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,8 8 8,8 8 8,7,7,5,8 8 8 8,8 8 8,6,6,9 9 9,5
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,6,5,7,8 8 8,7,5,8 8 8,8 8 8,8 8 8 8,8 8 8,9 9
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,9 9 9,5,6,8 8 8,8 8 8 8,8 8 8,9 9 9,6,6,8 8
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,9 9 9,5,6,8 8 8,8 8 8 8,8 8 8,9 9 9,7,9 9 9,8 8
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,9 9 9,8 8 8,7,7,8 8 8,8 8 8 8,8 8 8,6,6,9 9 9,7
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,8 8 8,9 9 9,5,7,8 8 8,8 8 8 8,8 8 8,9 9 9,6,6,8 8
-9 9 9,9 9 9 9,9 9 9,8 8 8 8,7,7,8 8 8 8,8 8 8,7,7,7,9 9 9,6,7,5
-8 8 8,9 9 9 9,9 9 9,7,9 9 9,8 8 8,9 9 9 9,9 9 9,9 9 9,6,9 9 9,7,8 8 8 8,7,6
-8 8 8,9 9 9 9,9 9 9,7,8 8 8,5,8 8 8 8,9 9 9,8 8 8,5,8 8 8,6,6,7,7
-8 8 8,9 9 9 9,8 8 8,7,9 9 9,8 8 8,9 9 9 9,9 9 9,9 9 9,6,9 9 9,7,8 8 8 8,7,6
-9 9 9,8 8 8 8,8 8 8,8 8 8 8,6,5,9 9 9 9,9 9 9,7,7,8 8 8,7,7,6,9 9
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,5,9 9 9,5,6,9 9 9,9 9 9 9,8 8 8,6,8 8 8 8,5,9 9
-8 8 8,8 8 8 8,8 8 8,8 8 8 8,8 8 8,8 8 8,9 9 9 9,5,8 8 8,9 9 9 9,6,8 8 8,7,6,8 8
-8 8 8,9 9 9 9,8 8 8,9 9 9 9,9 9 9,6,6,8 8 8,7,5,8 8 8,9 9 9,6,7,5
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,8 8 8,7,7,8 8 8,6,7,8 8 8,9 9 9,7,5,9 9
-9 9 9,9 9 9 9,9 9 9,8 8 8 8,6,5,9 9 9 9,9 9 9,6,8 8 8 8,6,9 9 9,6,9 9 9,9 9
-8 8 8,9 9 9 9,9 9 9,8 8 8 8,8 8 8,6,6,9 9 9,7,7,8 8 8,5,8 8 8 8,6,7
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,9 9 9,8 8 8,8 8 8 8,7,7,8 8 8 8,8 8 8,8 8 8,5,7,8 8
-8 8 8,9 9 9 9,8 8 8,9 9 9 9,8 8 8,6,6,8 8 8,7,7,8 8 8,9 9 9,8 8 8 8,7,9 9
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,7,5,8 8 8 8,8 8 8,7,9 9 9 9,8 8 8,9 9 9,6,9 9 9,8 8
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,8 8 8,7,8 8 8 8,8 8 8,6,7,7,7,7,7,8 8
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,8 8 8,7,6,8 8 8,7,8 8 8 8,7,6,6,7,8 8
-9 9 9,9 9 9 9,8 8 8,6,9 9 9,5,8 8 8 8,9 9 9,8 8 8,8 8 8 8,8 8 8,9 9 9,6,7,8 8
-8 8 8,9 9 9 9,8 8 8,8 8 8 8,9 9 9,7,5,9 9 9,7,8 8 8 8,9 9 9,8 8 8,8 8 8 8,6,6
-9 9 9,9 9 9 9,8 8 8,9 9 9 9,9 9 9,5,8 8 8 8,8 8 8,7,6,8 8 8,7,6,7,6
-8 8 8,9 9 9 9,8 8 8,9 9 9 9,8 8 8,6,6,9 9 9,9 9 9,7,8 8 8,5,8 8 8 8,8 8 8,8 8
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,6,9 9 9,8 8 8 8,8 8 8,7,7,8 8 8,9 9 9,6,7,8 8
-9 9 9,8 8 8 8,8 8 8,9 9 9 9,9 9 9,5,8 8 8 8,8 8 8,7,7,8 8 8,9 9 9,6,8 8 8,8 8
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results1.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results1.csv
deleted file mode 100644
index f5825603b85a2a5a4313c427ce35c6c0f97996c4..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results1.csv
+++ /dev/null
@@ -1,407 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,157.113483,1299.926,19.513340,1286.6509,0.839227,0.021821,20418.942195,2.86568647864431
-c2,1108.984,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,157.113483,1299.926,1273.3007,29.645681,0.839227,0.021821,20415.724336,2.86613815867435
-c3,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,157.113483,1299.926,29.645681,1286.6509,0.839227,0.021821,20449.568536,2.86139467704907
-c4,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,157.113483,1299.926,19.513340,1286.6509,0.839227,0.021821,20439.436195,2.86281314212731
-c5,1129.478,7471.505,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,77.680494,157.113483,1299.926,19.513340,29.645681,0.839227,0.010789,22431.361197,2.60859276743163
-c6,1108.984,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,3427.0759,1299.926,1273.3007,29.645681,0.839227,0.021821,23685.686753,2.47044922978827
-c7,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,3366.9575,1683.0934,77.680494,77.680494,1299.926,19.513340,29.645681,57.16078,0.021821,22369.163557,2.61584598066495
-c8,1129.478,7471.505,1949.97,3356.913,1731.6018,157.113483,157.113483,1683.0934,3203.344,157.113483,1299.926,19.513340,29.645681,57.16078,10.00372,22413.49517,2.6106721034858
-c9,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,3203.344,157.113483,1299.926,19.513340,29.645681,0.839227,0.021821,22308.094482,2.62300693714174
-c10,1129.478,7471.505,1949.97,3356.913,38.840247,77.680494,3366.9575,1683.0934,3203.344,3427.0759,19.513340,19.513340,39.466907,1.117251,0.016391,25784.48477,2.26936032028628
-c11,1129.478,7471.505,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,77.680494,77.680494,1299.926,19.513340,29.645681,0.552395,0.021821,25247.793425,2.31760002255484
-c12,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,118.016247,3427.0759,1299.926,1273.3007,1286.6509,0.839227,0.021821,25003.521725,2.34024179683735
-c13,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,3366.9575,1683.0934,77.680494,157.113483,1299.926,19.513340,39.466907,0.839227,0.016391,22402.090789,2.61200113564989
-c14,1129.478,7471.505,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,77.680494,157.113483,1299.926,19.513340,29.645681,0.839227,0.021821,22431.372229,2.60859148449651
-c15,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,3427.0759,1299.926,19.513340,19.513340,0.839227,0.021821,22442.261052,2.60732581475131
-c16,1129.478,6358.145,1949.97,3356.913,59.008124,118.016247,3366.9575,1683.0934,118.016247,3427.0759,1299.926,1273.3007,1286.6509,57.16078,0.016391,25483.728189,2.29614309882034
-c17,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,77.680494,3427.0759,1299.926,19.513340,29.645681,0.552395,0.021821,22452.106561,2.60618247216156
-c18,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,3203.344,157.113483,1299.926,1273.3007,29.645681,0.839227,0.021821,23561.881842,2.48343010066169
-c19,1108.984,7471.505,1949.97,3356.913,78.556742,157.113483,118.016247,1683.0934,3203.344,3427.0759,1299.926,39.466907,19.513340,57.16078,10.00372,23980.642519,2.44006333661129
-c20,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,118.016247,3427.0759,1299.926,19.513340,19.513340,0.839227,0.021821,22482.596805,2.60264804329551
-c21,1129.478,6358.145,1949.97,3356.913,59.008124,3053.2545,3366.9575,1683.0934,118.016247,3427.0759,1299.926,1273.3007,19.513340,0.839227,0.016391,27095.507329,2.15955678247874
-c22,1129.478,6358.145,1949.97,3356.913,78.556742,3053.2545,3366.9575,1683.0934,118.016247,3427.0759,19.513340,1273.3007,1286.6509,0.839227,0.016391,27101.780847,2.15905688845431
-c23,1129.478,7471.505,1949.97,3356.913,59.008124,3053.2545,3366.9575,1683.0934,3203.344,157.113483,39.466907,19.513340,39.466907,1.117251,10.00372,25540.205132,2.29106564772964
-c24,1108.984,6358.145,1949.97,3356.913,38.840247,3053.2545,3366.9575,1683.0934,3203.344,118.016247,1299.926,39.466907,29.645681,57.16078,0.010789,25663.728051,2.28003844565038
-c25,1129.478,7471.505,1949.97,3356.913,1731.6018,157.113483,77.680494,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,28114.817177,2.08126150230644
-c26,1129.478,6358.145,1949.97,3356.913,1731.6018,77.680494,77.680494,1683.0934,3203.344,3427.0759,39.466907,1273.3007,1286.6509,0.552395,10.00372,25604.95671,2.28527184315137
-c27,1129.478,7471.505,1949.97,3356.913,1731.6018,157.113483,77.680494,1683.0934,3203.344,3427.0759,1299.926,39.466907,39.466907,1.117251,10.00372,25577.755862,2.28770213188102
-c28,1129.478,7471.505,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,39.466907,19.513340,29.645681,0.839227,0.021821,27192.717659,2.1518366557397
-c29,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,3203.344,3427.0759,1299.926,19.513340,19.513340,0.839227,0.010789,25567.913526,2.28858277992213
-c30,1108.984,7471.505,1949.97,3356.913,1731.6018,118.016247,157.113483,1683.0934,3203.344,3427.0759,1299.926,29.645681,29.645681,0.839227,0.021821,25567.69524,2.28860231885883
-c31,1129.478,7471.505,1949.97,3356.913,1731.6018,118.016247,3366.9575,1683.0934,3203.344,157.113483,1299.926,19.513340,1286.6509,0.839227,0.021821,26774.943718,2.18541212413908
-c32,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,3203.344,3427.0759,1299.926,39.466907,29.645681,57.16078,0.010789,27769.40824,2.10714920989923
-c2,20415.724336
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,21.220352,210.33129,0.922624,0.018020,4378.669032,2.17061894184701
-c2,352.4661,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,0.922624,0.018020,4374.886302,2.17249575981104
-c3,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,21.220352,210.33129,0.922624,0.018020,4469.185332,2.12665647971261
-c4,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,21.220352,210.33129,0.922624,0.018020,4469.185332,2.12665647971261
-c5,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,4787.801446,1.98513285621467
-c6,352.4661,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,206.54856,21.220352,0.922624,0.018020,4835.407442,1.96558864489607
-c7,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,0.018020,4794.519144,1.98235144637163
-c8,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4732.951845,2.00813831799849
-c9,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,4724.582554,2.01169560446852
-c10,442.9824,1550.099,426.5549,652.9992,41.518080,82.574848,590.3019,290.68457,527.54432,543.5573,21.220352,21.220352,21.220352,0.922624,0.018020,5213.418218,1.82306916082002
-c11,442.9824,1550.099,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,5249.395298,1.81057463524678
-c12,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,206.54856,210.33129,0.922624,0.018020,5115.03468,1.85813441487489
-c13,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,4787.801446,1.98513285621467
-c14,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,4787.801446,1.98513285621467
-c15,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,4740.595534,2.00490041585352
-c16,442.9824,1610.85,426.5549,652.9992,41.518080,82.574848,590.3019,290.68457,83.036160,543.5573,208.37216,206.54856,210.33129,7.640322,0.018020,5397.96971,1.76074014722954
-c17,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,4740.595534,2.00490041585352
-c18,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,83.036160,208.37216,206.54856,21.220352,0.922624,0.018020,4909.910762,1.93576266965638
-c19,352.4661,1550.099,426.5549,652.9992,41.518080,82.574848,82.574848,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,7.640322,1.669613,4810.695965,1.97568543711356
-c20,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,4740.595534,2.00490041585352
-c21,442.9824,1610.85,426.5549,652.9992,41.518080,544.1687,590.3019,290.68457,83.036160,543.5573,208.37216,206.54856,21.220352,0.922624,0.018020,5663.734926,1.67811914123258
-c22,442.9824,1610.85,426.5549,652.9992,41.518080,544.1687,590.3019,290.68457,83.036160,543.5573,21.220352,206.54856,210.33129,0.922624,0.018020,5665.694056,1.67753886748983
-c23,442.9824,1550.099,426.5549,652.9992,41.518080,544.1687,590.3019,290.68457,527.54432,83.036160,21.220352,21.220352,21.220352,0.922624,1.669613,5216.142523,1.82211700195683
-c24,352.4661,1610.85,426.5549,652.9992,41.518080,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,21.220352,7.640322,0.018020,5378.595136,1.7670826193399
-c25,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,5567.912131,1.70699927794893
-c26,442.9824,1610.85,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,543.5573,21.220352,206.54856,210.33129,0.922624,1.669613,5434.793625,1.74881010005582
-c27,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,0.922624,1.669613,5186.755287,1.83244079368418
-c28,442.9824,1550.099,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,21.220352,21.220352,21.220352,0.922624,0.018020,5506.75165,1.72595798566732
-c29,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,5185.103694,1.8330244746495
-c30,352.4661,1550.099,426.5549,652.9992,333.7788,82.574848,82.574848,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,0.922624,0.018020,5094.587394,1.86559209537446
-c31,442.9824,1550.099,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,0.922624,0.018020,5421.420544,1.7531239101542
-c32,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,7.640322,0.018020,5669.783144,1.67632901452767
-c2,4374.886302
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,7471.505,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,357.330128,1299.926,69.928921,1286.6509,2.857198,0.543719,21386.505307,2.73603778314562
-c2,1108.984,7471.505,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,357.330128,1299.926,1273.3007,80.061262,2.857198,0.543719,21383.287448,2.73644951514824
-c3,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,357.330128,1299.926,80.061262,1286.6509,2.857198,0.543719,21870.163448,2.67553037335887
-c4,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,357.330128,1299.926,69.928921,1286.6509,2.857198,0.543719,21860.031107,2.67677050818951
-c5,1145.5648,7908.45,2073.98,3356.913,1731.6018,352.195669,3366.9575,1683.0934,277.897139,357.330128,1299.926,69.928921,80.061262,2.857198,0.532687,23707.289504,2.46819808676599
-c6,1108.984,7471.505,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,3427.0759,1299.926,1273.3007,80.061262,2.857198,0.543719,24453.03322,2.39292549424294
-c7,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,3366.9575,1683.0934,277.897139,277.897139,1299.926,69.928921,80.061262,57.16078,0.543719,23643.073893,2.4749018194641
-c8,1145.5648,7908.45,2073.98,3356.913,1731.6018,352.195669,352.195669,1683.0934,3203.344,357.330128,1299.926,69.928921,80.061262,57.16078,10.00372,23681.749149,2.47085999550776
-c9,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,3203.344,357.330128,1299.926,69.928921,80.061262,2.857198,0.543719,23578.88833,2.48163890408637
-c10,1145.5648,7908.45,2073.98,3356.913,139.46418,272.76268,3366.9575,1683.0934,3203.344,3427.0759,69.928921,69.928921,89.882488,3.135222,0.538289,26811.019301,2.1824715415639
-c11,1145.5648,7908.45,2073.98,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,277.897139,277.897139,1299.926,69.928921,80.061262,2.570366,0.543719,26328.639546,2.22245765940626
-c12,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,318.232892,3427.0759,1299.926,1273.3007,1286.6509,2.857198,0.543719,26173.484411,2.23563227962284
-c13,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,3366.9575,1683.0934,277.897139,357.330128,1299.926,69.928921,89.882488,2.857198,0.538289,23678.019096,2.47124923578426
-c14,1145.5648,7908.45,2073.98,3356.913,1731.6018,352.195669,3366.9575,1683.0934,277.897139,357.330128,1299.926,69.928921,80.061262,2.857198,0.543719,23707.300536,2.46819693821003
-c15,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,3427.0759,1299.926,69.928921,69.928921,2.857198,0.543719,23713.0549,2.46759798950409
-c16,1145.5648,6358.145,1949.97,3356.913,159.632057,313.098433,3366.9575,1683.0934,318.232892,3427.0759,1299.926,1273.3007,1286.6509,57.16078,0.538289,25996.259651,2.25087329498426
-c17,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,277.897139,3427.0759,1299.926,69.928921,80.061262,2.570366,0.543719,23722.900409,2.46657388377955
-c18,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,3203.344,357.330128,1299.926,1273.3007,80.061262,2.857198,0.543719,24782.260109,2.36113600412241
-c19,1108.984,7471.505,2073.98,3356.913,179.180675,352.195669,313.098433,1683.0934,3203.344,3427.0759,1299.926,89.882488,69.928921,57.16078,10.00372,24696.271986,2.3693570689226
-c20,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,318.232892,3427.0759,1299.926,69.928921,69.928921,2.857198,0.543719,23753.390653,2.46340774887517
-c21,1145.5648,6358.145,1949.97,3356.913,159.632057,3053.2545,3366.9575,1683.0934,318.232892,3427.0759,1299.926,1273.3007,69.928921,2.857198,0.538289,27465.390157,2.13047352669918
-c22,1145.5648,6358.145,1949.97,3356.913,179.180675,3053.2545,3366.9575,1683.0934,318.232892,3427.0759,69.928921,1273.3007,1286.6509,2.857198,0.538289,27471.663675,2.12998700487335
-c23,1145.5648,7908.45,2073.98,3356.913,159.632057,3053.2545,3366.9575,1683.0934,3203.344,357.330128,89.882488,69.928921,89.882488,3.135222,10.00372,26571.352224,2.20215689926132
-c24,1108.984,6756.043,1949.97,3356.913,139.46418,3053.2545,3366.9575,1683.0934,3203.344,318.232892,1299.926,89.882488,80.061262,57.16078,0.532687,26463.819689,2.21110509781064
-c25,1145.5648,7908.45,2073.98,3356.913,1731.6018,352.195669,272.76268,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,29082.023349,2.0120431766247
-c26,1145.5648,6358.145,1949.97,3356.913,1731.6018,272.76268,272.76268,1683.0934,3203.344,3427.0759,89.882488,1273.3007,1286.6509,2.570366,10.00372,26063.641434,2.24505415970628
-c27,1145.5648,7908.45,2073.98,3356.913,1731.6018,352.195669,272.76268,1683.0934,3203.344,3427.0759,1299.926,89.882488,89.882488,3.135222,10.00372,26647.811167,2.19583838450037
-c28,1145.5648,7908.45,2073.98,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,89.882488,69.928921,80.061262,2.857198,0.543719,28123.762716,2.08059949964129
-c29,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,3203.344,3427.0759,1299.926,69.928921,69.928921,2.857198,0.532687,26638.490729,2.19660667785648
-c30,1108.984,7471.505,2073.98,3356.913,1731.6018,313.098433,352.195669,1683.0934,3203.344,3427.0759,1299.926,80.061262,80.061262,2.857198,0.543719,26185.240643,2.23462856107758
-c31,1145.5648,7908.45,2073.98,3356.913,1731.6018,313.098433,3366.9575,1683.0934,3203.344,357.330128,1299.926,69.928921,1286.6509,2.857198,0.543719,27800.239799,2.10481229858397
-c32,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,3203.344,3427.0759,1299.926,89.882488,80.061262,57.16078,0.532687,28463.741486,2.05574824610481
-c2,21383.287448
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,649.889539,0
-c2,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,1.156348,0.102999,649.889539,0
-c3,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,649.889539,0
-c4,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,649.889539,0
-c5,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,536.384324,0
-c6,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,0,38.454652,1.156348,0.102999,496.761636,0
-c7,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0.102999,535.227976,0
-c8,0,0,0,0,0,151.959867,151.959867,0,0,153.127903,0,38.454652,38.454652,0,0,533.956941,0
-c9,0,0,0,0,0,151.959867,151.959867,0,0,153.127903,0,38.454652,38.454652,1.156348,0.102999,535.216288,0
-c10,0,0,0,0,76.708720,151.959867,0,0,0,0,38.454652,38.454652,38.454652,1.156348,0.102999,345.29189,0
-c11,0,0,0,0,0,0,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,384.424457,0
-c12,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,0,0,1.156348,0.102999,458.306984,0
-c13,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,536.384324,0
-c14,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,536.384324,0
-c15,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,535.216288,0
-c16,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,0,0,0.102999,381.899489,0
-c17,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,535.216288,0
-c18,0,0,0,0,0,151.959867,151.959867,0,0,153.127903,0,0,38.454652,1.156348,0.102999,496.761636,0
-c19,0,0,0,0,76.708720,151.959867,151.959867,0,0,0,0,38.454652,38.454652,0,0,457.537758,0
-c20,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,38.454652,38.454652,1.156348,0.102999,535.216288,0
-c21,0,0,0,0,76.708720,0,0,0,153.127903,0,0,0,38.454652,1.156348,0.102999,269.550622,0
-c22,0,0,0,0,76.708720,0,0,0,153.127903,0,38.454652,0,0,1.156348,0.102999,269.550622,0
-c23,0,0,0,0,76.708720,0,0,0,0,153.127903,38.454652,38.454652,38.454652,1.156348,0,346.356927,0
-c24,0,0,0,0,76.708720,0,0,0,0,153.127903,0,38.454652,38.454652,0,0.102999,306.848926,0
-c25,0,0,0,0,0,151.959867,151.959867,0,0,0,0,0,0,0,0,303.919734,0
-c26,0,0,0,0,0,151.959867,151.959867,0,0,0,38.454652,0,0,1.156348,0,343.530734,0
-c27,0,0,0,0,0,151.959867,151.959867,0,0,0,0,38.454652,38.454652,1.156348,0,381.985386,0
-c28,0,0,0,0,0,0,0,0,0,153.127903,38.454652,38.454652,38.454652,1.156348,0.102999,269.751206,0
-c29,0,0,0,0,0,151.959867,151.959867,0,0,0,0,38.454652,38.454652,1.156348,0.102999,382.088385,0
-c30,0,0,0,0,0,151.959867,151.959867,0,0,0,0,38.454652,38.454652,1.156348,0.102999,382.088385,0
-c31,0,0,0,0,0,151.959867,0,0,0,153.127903,0,38.454652,0,1.156348,0.102999,344.801769,0
-c32,0,0,0,0,0,151.959867,0,0,0,0,0,38.454652,38.454652,0,0.102999,228.97217,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,193.663573,0
-c2,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0.861623,0.418899,193.663573,0
-c3,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,193.663573,0
-c4,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,193.663573,0
-c5,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,162.502183,0
-c6,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,0,11.960929,0.861623,0.418899,146.574831,0
-c7,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0.418899,161.64056,0
-c8,0,0,0,0,0,43.122319,43.122319,0,0,47.088742,0,11.960929,11.960929,0,0,157.255238,0
-c9,0,0,0,0,0,43.122319,43.122319,0,0,47.088742,0,11.960929,11.960929,0.861623,0.418899,158.53576,0
-c10,0,0,0,0,23.915213,43.122319,0,0,0,0,11.960929,11.960929,11.960929,0.861623,0.418899,104.200841,0
-c11,0,0,0,0,0,0,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,119.379864,0
-c12,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,0,0,0.861623,0.418899,134.613902,0
-c13,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,162.502183,0
-c14,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,162.502183,0
-c15,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,158.53576,0
-c16,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,0,0,0.418899,114.545173,0
-c17,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,158.53576,0
-c18,0,0,0,0,0,43.122319,43.122319,0,0,47.088742,0,0,11.960929,0.861623,0.418899,146.574831,0
-c19,0,0,0,0,23.915213,43.122319,43.122319,0,0,0,0,11.960929,11.960929,0,0,134.081709,0
-c20,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,11.960929,11.960929,0.861623,0.418899,158.53576,0
-c21,0,0,0,0,23.915213,0,0,0,47.088742,0,0,0,11.960929,0.861623,0.418899,84.245406,0
-c22,0,0,0,0,23.915213,0,0,0,47.088742,0,11.960929,0,0,0.861623,0.418899,84.245406,0
-c23,0,0,0,0,23.915213,0,0,0,0,47.088742,11.960929,11.960929,11.960929,0.861623,0,107.748365,0
-c24,0,0,0,0,23.915213,0,0,0,0,47.088742,0,11.960929,11.960929,0,0.418899,95.344712,0
-c25,0,0,0,0,0,43.122319,43.122319,0,0,0,0,0,0,0,0,86.244638,0
-c26,0,0,0,0,0,43.122319,43.122319,0,0,0,11.960929,0,0,0.861623,0,99.06719,0
-c27,0,0,0,0,0,43.122319,43.122319,0,0,0,0,11.960929,11.960929,0.861623,0,111.028119,0
-c28,0,0,0,0,0,0,0,0,0,47.088742,11.960929,11.960929,11.960929,0.861623,0.418899,84.252051,0
-c29,0,0,0,0,0,43.122319,43.122319,0,0,0,0,11.960929,11.960929,0.861623,0.418899,111.447018,0
-c30,0,0,0,0,0,43.122319,43.122319,0,0,0,0,11.960929,11.960929,0.861623,0.418899,111.447018,0
-c31,0,0,0,0,0,43.122319,0,0,0,47.088742,0,11.960929,0,0.861623,0.418899,103.452512,0
-c32,0,0,0,0,0,43.122319,0,0,0,0,0,11.960929,11.960929,0,0.418899,67.463076,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,79.61388,0
-c2,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0.281905,0.089271,79.61388,0
-c3,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,79.61388,0
-c4,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,79.61388,0
-c5,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,66.299523,0
-c6,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,0,4.848960,0.281905,0.089271,60.580325,0
-c7,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0.089271,66.017618,0
-c8,0,0,0,0,0,18.163317,18.163317,0,0,19.033555,0,4.848960,4.848960,0,0,65.058109,0
-c9,0,0,0,0,0,18.163317,18.163317,0,0,19.033555,0,4.848960,4.848960,0.281905,0.089271,65.429285,0
-c10,0,0,0,0,9.597736,18.163317,0,0,0,0,4.848960,4.848960,4.848960,0.281905,0.089271,42.679109,0
-c11,0,0,0,0,0,0,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,48.136206,0
-c12,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,0,0,0.281905,0.089271,55.731365,0
-c13,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,66.299523,0
-c14,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,66.299523,0
-c15,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,65.429285,0
-c16,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,0,0,0.089271,46.883879,0
-c17,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,65.429285,0
-c18,0,0,0,0,0,18.163317,18.163317,0,0,19.033555,0,0,4.848960,0.281905,0.089271,60.580325,0
-c19,0,0,0,0,9.597736,18.163317,18.163317,0,0,0,0,4.848960,4.848960,0,0,55.62229,0
-c20,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,4.848960,4.848960,0.281905,0.089271,65.429285,0
-c21,0,0,0,0,9.597736,0,0,0,19.033555,0,0,0,4.848960,0.281905,0.089271,33.851427,0
-c22,0,0,0,0,9.597736,0,0,0,19.033555,0,4.848960,0,0,0.281905,0.089271,33.851427,0
-c23,0,0,0,0,9.597736,0,0,0,0,19.033555,4.848960,4.848960,4.848960,0.281905,0,43.460076,0
-c24,0,0,0,0,9.597736,0,0,0,0,19.033555,0,4.848960,4.848960,0,0.089271,38.418482,0
-c25,0,0,0,0,0,18.163317,18.163317,0,0,0,0,0,0,0,0,36.326634,0
-c26,0,0,0,0,0,18.163317,18.163317,0,0,0,4.848960,0,0,0.281905,0,41.457499,0
-c27,0,0,0,0,0,18.163317,18.163317,0,0,0,0,4.848960,4.848960,0.281905,0,46.306459,0
-c28,0,0,0,0,0,0,0,0,0,19.033555,4.848960,4.848960,4.848960,0.281905,0.089271,33.951611,0
-c29,0,0,0,0,0,18.163317,18.163317,0,0,0,0,4.848960,4.848960,0.281905,0.089271,46.39573,0
-c30,0,0,0,0,0,18.163317,18.163317,0,0,0,0,4.848960,4.848960,0.281905,0.089271,46.39573,0
-c31,0,0,0,0,0,18.163317,0,0,0,19.033555,0,4.848960,0,0.281905,0.089271,42.417008,0
-c32,0,0,0,0,0,18.163317,0,0,0,0,0,4.848960,4.848960,0,0.089271,27.950508,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c2,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c3,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c4,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c5,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c6,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c7,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c8,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c9,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c10,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c11,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c12,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c13,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c14,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c15,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c16,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c17,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c18,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c19,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c20,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c21,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c22,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c23,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c24,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c25,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c26,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.0868,0
-c27,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c28,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c29,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c30,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c31,16.0868,436.945,124.01,0,0,0,0,0,0,0,0,0,0,0,0,577.0418,0
-c32,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c2,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c3,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c4,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c5,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c6,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c7,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c8,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c9,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c10,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c11,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c12,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c13,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c14,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c15,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c16,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c17,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c18,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c19,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c20,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c21,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c22,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c23,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c24,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c25,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c26,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.62006,0
-c27,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c28,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c29,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c30,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c31,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,119.49066,0
-c32,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1550.099,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,26.069312,210.33129,1.204529,0.107291,4486.822812,2.11829669777705
-c2,352.4661,1550.099,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,1.204529,0.107291,4483.040082,2.12008408850795
-c3,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,26.069312,210.33129,1.204529,0.107291,4668.289872,2.03595368218485
-c4,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,26.069312,210.33129,1.204529,0.107291,4668.289872,2.03595368218485
-c5,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,4973.591629,1.91097755422538
-c6,352.4661,1550.099,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,206.54856,26.069312,1.204529,0.107291,4924.527667,1.93001696968602
-c7,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,0.107291,4980.027422,1.90850795824176
-c8,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4917.500614,1.93277494214513
-c9,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,4909.502499,1.93592364324971
-c10,450.60246,1633.4297,455.0948,652.9992,51.115816,100.738165,590.3019,290.68457,527.54432,543.5573,26.069312,26.069312,26.069312,1.204529,0.107291,5375.587987,1.76807114015766
-c11,450.60246,1633.4297,455.0948,652.9992,333.7788,544.1687,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,5417.022164,1.75454736842485
-c12,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,206.54856,210.33129,1.204529,0.107291,5290.256705,1.79658994040082
-c13,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,4973.591629,1.91097755422538
-c14,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,4973.591629,1.91097755422538
-c15,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,4925.515479,1.92962990484128
-c16,450.60246,1610.85,426.5549,652.9992,51.115816,100.738165,590.3019,290.68457,102.069715,543.5573,208.37216,206.54856,210.33129,7.640322,0.107291,5452.473649,1.74313946210986
-c17,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,4925.515479,1.92962990484128
-c18,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,102.069715,208.37216,206.54856,26.069312,1.204529,0.107291,5089.981747,1.86728016792473
-c19,352.4661,1550.099,455.0948,652.9992,51.115816,100.738165,100.738165,290.68457,527.54432,543.5573,208.37216,26.069312,26.069312,7.640322,1.669613,4894.858155,1.9417155028527
-c20,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,4925.515479,1.92962990484128
-c21,450.60246,1610.85,426.5549,652.9992,51.115816,544.1687,590.3019,290.68457,102.069715,543.5573,208.37216,206.54856,26.069312,1.204529,0.107291,5705.206413,1.6659207929359
-c22,450.60246,1610.85,426.5549,652.9992,51.115816,544.1687,590.3019,290.68457,102.069715,543.5573,26.069312,206.54856,210.33129,1.204529,0.107291,5707.165543,1.66534892318351
-c23,450.60246,1633.4297,455.0948,652.9992,51.115816,544.1687,590.3019,290.68457,527.54432,102.069715,26.069312,26.069312,26.069312,1.204529,1.669613,5379.093259,1.76691898126247
-c24,352.4661,1720.485,426.5549,652.9992,51.115816,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,26.069312,7.640322,0.107291,5526.648618,1.71974421443588
-c25,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,5723.729425,1.66052957542574
-c26,450.60246,1610.85,426.5549,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,543.5573,26.069312,206.54856,210.33129,1.204529,1.669613,5483.871184,1.7331592347418
-c27,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,543.5573,208.37216,26.069312,26.069312,1.204529,1.669613,5352.552406,1.77568032211658
-c28,450.60246,1633.4297,455.0948,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,26.069312,26.069312,26.069312,1.204529,0.107291,5660.193921,1.67916896889708
-c29,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,5350.990084,1.77619876530874
-c30,352.4661,1550.099,455.0948,652.9992,333.7788,100.738165,100.738165,290.68457,527.54432,543.5573,208.37216,26.069312,26.069312,1.204529,0.107291,5169.523024,1.83854911372285
-c31,450.60246,1633.4297,455.0948,652.9992,333.7788,100.738165,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,1.204529,0.107291,5583.328212,1.70228609655151
-c32,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,527.54432,543.5573,208.37216,26.069312,26.069312,7.640322,0.107291,5807.368652,1.63661419893937
-c2,4483.040082
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results2.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results2.csv
deleted file mode 100644
index d02d164d68835e458f2902e02bbd6c3acff2e1c7..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_promise_results2.csv
+++ /dev/null
@@ -1,616 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,174.570537,1949.97,6973.454,3475.891,77.680494,6530.41,1683.0934,3203.344,3427.0759,1299.926,19.513340,29.645681,69.37799,10.00372,30032.940062,1.94833694361489
-c2,1108.984,174.570537,1949.97,6973.454,3475.891,118.016247,3366.9575,1683.0934,157.113483,77.680494,1299.926,2376.5679,1286.6509,1.117251,10.00372,24059.996432,2.43201559755736
-c3,1108.984,174.570537,1949.97,6973.454,3475.891,77.680494,3366.9575,1683.0934,6053.9348,3427.0759,1299.926,2376.5679,39.466907,1.117251,10.00372,32018.693409,1.82750388695755
-c4,1129.478,174.570537,1949.97,6973.454,78.556742,157.113483,3366.9575,1683.0934,157.113483,3427.0759,1299.926,39.466907,29.645681,1.117251,10.00372,20477.542604,2.85748576813223
-c5,1129.478,6358.145,3792.343,157.113483,78.556742,157.113483,6530.41,1683.0934,77.680494,77.680494,29.645681,1273.3007,2353.8847,0.552395,0.021821,23699.019393,2.46905939970569
-c6,1108.984,7471.505,1949.97,6973.454,1731.6018,3053.2545,3366.9575,78.556742,77.680494,118.016247,39.466907,39.466907,39.466907,57.16078,10.00372,26115.545504,2.24059216415672
-c7,1108.984,174.570537,1949.97,6973.454,59.008124,77.680494,6530.41,1683.0934,157.113483,3427.0759,1299.926,2376.5679,29.645681,1.117251,0.021821,25848.638591,2.2637279874771
-c8,1108.984,7471.505,1949.97,157.113483,78.556742,6007.756,6530.41,3324.8076,157.113483,157.113483,1299.926,29.645681,19.513340,69.37799,0.021821,28361.814623,2.06313620670922
-c9,1108.984,174.570537,1949.97,6973.454,78.556742,118.016247,3366.9575,1683.0934,157.113483,3427.0759,1299.926,2376.5679,29.645681,0.552395,10.00372,22754.487505,2.57154930749319
-c10,1108.984,174.570537,1949.97,6973.454,78.556742,118.016247,3366.9575,1683.0934,157.113483,3427.0759,1299.926,2376.5679,29.645681,1.117251,10.00372,22755.052361,2.57148547309605
-c11,1108.984,174.570537,1949.97,6973.454,59.008124,6007.756,3366.9575,1683.0934,3203.344,77.680494,1299.926,2376.5679,29.645681,1.117251,10.00372,28322.078607,2.06603079697458
-c12,1108.984,7471.505,1949.97,6973.454,38.840247,77.680494,3366.9575,78.556742,157.113483,3427.0759,2479.9481,2376.5679,2353.8847,1.117251,10.00372,31871.659037,1.83593475920023
-c13,1108.984,7471.505,1949.97,157.113483,59.008124,6007.756,3366.9575,3324.8076,157.113483,3427.0759,19.513340,2376.5679,29.645681,1.117251,10.00372,29467.138982,1.98574712937584
-c14,1108.984,6358.145,3792.343,3356.913,1731.6018,6007.756,6530.41,38.840247,118.016247,118.016247,1299.926,29.645681,19.513340,0.839227,0.016391,30510.96618,1.91781165847101
-c15,1129.478,6358.145,3792.343,3356.913,38.840247,6007.756,157.113483,3324.8076,157.113483,3427.0759,39.466907,29.645681,19.513340,1.117251,0.016391,27839.345283,2.10185570235181
-c16,1108.984,6358.145,1949.97,3356.913,38.840247,118.016247,157.113483,1683.0934,157.113483,77.680494,1299.926,29.645681,19.513340,0.552395,1.812953,16357.319723,3.57725394356618
-c17,1129.478,6358.145,3792.343,3356.913,3475.891,3053.2545,77.680494,38.840247,3203.344,6493.6098,29.645681,1273.3007,29.645681,0.552395,10.00372,32322.647218,1.81031851343483
-c18,1108.984,174.570537,1949.97,6973.454,78.556742,77.680494,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,29.645681,1.117251,0.016391,24647.692596,2.37402695516794
-c19,1108.984,6358.145,1949.97,3356.913,3475.891,3053.2545,157.113483,3324.8076,77.680494,3427.0759,1299.926,29.645681,29.645681,57.16078,0.010789,27706.223908,2.11195458558714
-c20,1108.984,6358.145,1949.97,6973.454,78.556742,6007.756,157.113483,3324.8076,157.113483,6493.6098,29.645681,39.466907,29.645681,0.552395,10.00372,32718.824492,1.78839819500445
-c21,1108.984,6358.145,3792.343,3356.913,3475.891,3053.2545,77.680494,78.556742,157.113483,6493.6098,29.645681,1273.3007,2353.8847,0.839227,0.016391,31610.177718,1.85112172351273
-c22,1108.984,7471.505,1949.97,77.680494,59.008124,77.680494,3366.9575,1683.0934,157.113483,3427.0759,1299.926,2376.5679,29.645681,0.552395,10.00372,23095.764091,2.53355058352224
-c23,1129.478,6358.145,1949.97,6973.454,1731.6018,3053.2545,157.113483,78.556742,3203.344,3427.0759,1299.926,19.513340,29.645681,69.37799,10.00372,29490.460156,1.98417679259838
-c24,1108.984,6358.145,1949.97,6973.454,1731.6018,3053.2545,157.113483,78.556742,77.680494,3427.0759,1299.926,29.645681,29.645681,69.37799,0.010789,26344.44206,2.22112453502033
-c25,1129.478,6358.145,1949.97,3356.913,59.008124,77.680494,157.113483,1683.0934,157.113483,77.680494,1299.926,1273.3007,1286.6509,57.16078,1.812953,18925.046811,3.09189652835096
-c26,1108.984,6358.145,1949.97,3356.913,1731.6018,6007.756,77.680494,59.008124,3203.344,3427.0759,1299.926,2376.5679,29.645681,0.839227,10.00372,30997.460846,1.88771225310797
-c27,1108.984,6358.145,1949.97,3356.913,1731.6018,6007.756,77.680494,59.008124,3203.344,3427.0759,1299.926,2376.5679,39.466907,69.37799,10.00372,31075.820835,1.88295224655181
-c28,1108.984,6358.145,1949.97,6973.454,3475.891,3053.2545,157.113483,78.556742,3203.344,3427.0759,1299.926,29.645681,29.645681,69.37799,0.021821,31214.405798,1.87459236079035
-c29,1108.984,6358.145,1949.97,3356.913,1731.6018,6007.756,77.680494,78.556742,3203.344,3427.0759,1299.926,2376.5679,29.645681,0.839227,10.00372,31017.009464,1.88652251346999
-c30,1108.984,7471.505,3792.343,3356.913,78.556742,157.113483,3366.9575,1683.0934,157.113483,157.113483,39.466907,2376.5679,29.645681,1.117251,0.010789,23776.501619,2.46101329516616
-c31,1129.478,7471.505,3792.343,157.113483,3475.891,3053.2545,6530.41,3324.8076,6053.9348,118.016247,2479.9481,39.466907,1286.6509,1.117251,0.016391,38913.953179,1.50368394656467
-c32,1129.478,7471.505,3792.343,157.113483,1731.6018,77.680494,3366.9575,3324.8076,3203.344,77.680494,1299.926,29.645681,29.645681,1.117251,0.021821,25692.867805,2.27745252337567
-c33,1129.478,7471.505,1949.97,157.113483,3475.891,3053.2545,6530.41,3324.8076,6053.9348,118.016247,2479.9481,39.466907,1286.6509,1.117251,0.016391,37071.580179,1.57841360963365
-c34,1108.984,6358.145,1949.97,3356.913,59.008124,77.680494,6530.41,3324.8076,157.113483,157.113483,1299.926,39.466907,39.466907,0.839227,1.812953,24461.657178,2.39208186828886
-c35,1129.478,7471.505,1949.97,3356.913,38.840247,6007.756,77.680494,59.008124,6053.9348,6493.6098,1299.926,29.645681,1286.6509,0.552395,1.812953,35257.283394,1.65963684788585
-c36,1129.478,6358.145,1949.97,3356.913,1731.6018,3053.2545,6530.41,38.840247,3203.344,6493.6098,29.645681,1273.3007,39.466907,0.839227,10.00372,35198.822582,1.66239329569745
-c37,1129.478,7471.505,1949.97,6973.454,3475.891,118.016247,118.016247,1683.0934,157.113483,77.680494,1299.926,2376.5679,29.645681,1.117251,0.010789,26861.485492,2.17837120894114
-c38,1108.984,7471.505,1949.97,6973.454,1731.6018,157.113483,157.113483,1683.0934,118.016247,157.113483,1299.926,2376.5679,39.466907,0.552395,1.812953,25226.291051,2.31957549735489
-c39,1108.984,7471.505,3792.343,3356.913,59.008124,77.680494,6530.41,3324.8076,118.016247,3427.0759,29.645681,2376.5679,29.645681,69.37799,1.812953,31773.79357,1.8415895643663
-c40,1129.478,7471.505,3792.343,3356.913,1731.6018,118.016247,118.016247,3324.8076,157.113483,157.113483,1299.926,19.513340,1286.6509,0.839227,0.021821,23963.859148,2.44177226370095
-c41,1108.984,7471.505,1949.97,6973.454,3475.891,3053.2545,3366.9575,78.556742,157.113483,3427.0759,1299.926,1273.3007,19.513340,1.117251,10.00372,33666.623136,1.73805036616889
-c42,1129.478,7471.505,1949.97,6973.454,1731.6018,118.016247,118.016247,1683.0934,157.113483,157.113483,1299.926,2376.5679,1286.6509,1.117251,1.812953,26455.436664,2.21180573826795
-c43,1108.984,7471.505,1949.97,6973.454,78.556742,77.680494,3366.9575,1683.0934,157.113483,6493.6098,1299.926,2376.5679,29.645681,69.37799,10.00372,33146.44571,1.76532612812885
-c44,1108.984,7471.505,1949.97,6973.454,1731.6018,157.113483,3366.9575,1683.0934,118.016247,157.113483,39.466907,39.466907,39.466907,1.117251,10.00372,24847.330605,2.3549526320437
-c45,1108.984,6358.145,1949.97,6973.454,1731.6018,157.113483,118.016247,1683.0934,157.113483,3427.0759,39.466907,29.645681,29.645681,1.117251,10.00372,23774.446553,2.46122602544847
-c46,1108.984,7471.505,1949.97,118.016247,3475.891,77.680494,3366.9575,3324.8076,3203.344,3427.0759,1299.926,2376.5679,29.645681,1.117251,10.00372,31241.492293,1.87296708194743
-c47,1129.478,7471.505,1949.97,3356.913,3475.891,157.113483,77.680494,3324.8076,157.113483,3427.0759,2479.9481,1273.3007,1286.6509,0.839227,0.016391,29568.303278,1.97895314096841
-c48,1108.984,7471.505,1949.97,6973.454,3475.891,77.680494,3366.9575,1683.0934,157.113483,118.016247,1299.926,39.466907,29.645681,1.117251,0.016391,27752.837354,2.10840736339074
-c49,1129.478,7471.505,1949.97,6973.454,1731.6018,118.016247,118.016247,3324.8076,6053.9348,157.113483,1299.926,19.513340,1286.6509,57.16078,10.00372,31701.151917,1.84580947757423
-c50,1108.984,6358.145,1949.97,6973.454,59.008124,6007.756,3366.9575,1683.0934,157.113483,157.113483,1299.926,2376.5679,29.645681,1.117251,10.00372,31538.855542,1.85530786237777
-c51,1108.984,6358.145,1949.97,6973.454,3475.891,77.680494,3366.9575,1683.0934,157.113483,157.113483,1299.926,2376.5679,29.645681,57.16078,10.00372,29081.706441,2.01206510218048
-c16,16357.319723
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,90.532480,426.5549,1159.5933,558.563,82.574848,971.2723,290.68457,527.54432,543.5573,208.37216,21.220352,21.220352,12.419968,1.669613,5268.245563,1.80409623354347
-c2,352.4661,90.532480,426.5549,1159.5933,558.563,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,344.22248,210.33129,0.922624,1.669613,4482.861585,2.12016850526585
-c3,352.4661,90.532480,426.5549,1159.5933,558.563,82.574848,590.3019,290.68457,857.91333,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,5529.148957,1.71896652812556
-c4,442.9824,90.532480,426.5549,1159.5933,41.518080,82.574848,590.3019,290.68457,83.036160,543.5573,208.37216,21.220352,21.220352,0.922624,1.669613,4004.741039,2.37329251207814
-c5,442.9824,1610.85,701.7255,82.574848,41.518080,82.574848,971.2723,290.68457,83.036160,83.036160,21.220352,206.54856,351.94343,0.922624,0.018020,4970.907852,1.91200928477784
-c6,352.4661,1550.099,426.5549,1159.5933,333.7788,544.1687,590.3019,41.518080,83.036160,83.036160,21.220352,21.220352,21.220352,7.640322,1.669613,5237.524091,1.81467842656118
-c7,352.4661,90.532480,426.5549,1159.5933,41.518080,82.574848,971.2723,290.68457,83.036160,543.5573,208.37216,344.22248,21.220352,0.922624,0.018020,4616.545674,2.05877351233646
-c8,352.4661,1550.099,426.5549,82.574848,41.518080,925.6073,971.2723,490.07014,83.036160,83.036160,208.37216,21.220352,21.220352,12.419968,0.018020,5269.48584,1.80367160406542
-c9,352.4661,90.532480,426.5549,1159.5933,41.518080,82.574848,590.3019,290.68457,83.036160,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,4237.226867,2.24307600985775
-c10,352.4661,90.532480,426.5549,1159.5933,41.518080,82.574848,590.3019,290.68457,83.036160,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,4237.226867,2.24307600985775
-c11,352.4661,90.532480,426.5549,1159.5933,41.518080,925.6073,590.3019,290.68457,527.54432,83.036160,208.37216,344.22248,21.220352,0.922624,1.669613,5064.246339,1.87676928294919
-c12,352.4661,1550.099,426.5549,1159.5933,41.518080,82.574848,590.3019,41.518080,83.036160,543.5573,344.11179,344.22248,351.94343,0.922624,1.669613,5914.089605,1.6070811624593
-c13,352.4661,1550.099,426.5549,82.574848,41.518080,925.6073,590.3019,490.07014,83.036160,543.5573,21.220352,344.22248,21.220352,0.922624,1.669613,5475.041149,1.73595443865121
-c14,352.4661,1610.85,701.7255,652.9992,333.7788,925.6073,971.2723,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,6008.043108,1.58194970125122
-c15,442.9824,1610.85,701.7255,652.9992,41.518080,925.6073,82.574848,490.07014,83.036160,543.5573,21.220352,21.220352,21.220352,0.922624,0.018020,5639.522628,1.68532385033417
-c16,352.4661,1610.85,426.5549,652.9992,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,1.04789,3959.078244,2.40066533980162
-c17,442.9824,1610.85,701.7255,652.9992,558.563,544.1687,82.574848,41.518080,527.54432,883.36663,21.220352,206.54856,21.220352,0.922624,1.669613,6297.874179,1.50914764838862
-c18,352.4661,90.532480,426.5549,1159.5933,41.518080,82.574848,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,21.220352,0.922624,0.018020,4542.409514,2.09237452490122
-c19,352.4661,1610.85,426.5549,652.9992,558.563,544.1687,82.574848,490.07014,83.036160,543.5573,208.37216,21.220352,21.220352,7.640322,0.018020,5603.311554,1.69621515719461
-c20,352.4661,1610.85,426.5549,1159.5933,41.518080,925.6073,82.574848,490.07014,83.036160,883.36663,21.220352,21.220352,21.220352,0.922624,1.669613,6121.890751,1.55253048270984
-c21,352.4661,1610.85,701.7255,652.9992,558.563,544.1687,82.574848,41.518080,83.036160,883.36663,21.220352,206.54856,351.94343,0.922624,0.018020,6091.921204,1.56016824310572
-c22,352.4661,1550.099,426.5549,82.574848,41.518080,82.574848,590.3019,290.68457,83.036160,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,4619.774935,2.05733441260522
-c23,442.9824,1610.85,426.5549,1159.5933,333.7788,544.1687,82.574848,41.518080,527.54432,543.5573,208.37216,21.220352,21.220352,12.419968,1.669613,5978.025093,1.58989329270965
-c24,352.4661,1610.85,426.5549,1159.5933,333.7788,544.1687,82.574848,41.518080,83.036160,543.5573,208.37216,21.220352,21.220352,12.419968,0.018020,5441.34904,1.74670323727839
-c25,442.9824,1610.85,426.5549,652.9992,41.518080,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,206.54856,210.33129,7.640322,1.04789,4430.751388,2.14510386866455
-c26,352.4661,1610.85,426.5549,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,6073.858077,1.56480804803619
-c27,352.4661,1610.85,426.5549,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,543.5573,208.37216,344.22248,21.220352,12.419968,1.669613,6085.355421,1.56185158372442
-c28,352.4661,1610.85,426.5549,1159.5933,558.563,544.1687,82.574848,41.518080,527.54432,543.5573,208.37216,21.220352,21.220352,12.419968,0.018020,6110.6414,1.55538860494434
-c29,352.4661,1610.85,426.5549,652.9992,333.7788,925.6073,82.574848,41.518080,527.54432,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,6073.858077,1.56480804803619
-c30,352.4661,1550.099,701.7255,652.9992,41.518080,82.574848,590.3019,290.68457,83.036160,83.036160,21.220352,344.22248,21.220352,0.922624,0.018020,4816.045346,1.97349096153027
-c31,442.9824,1550.099,701.7255,82.574848,558.563,544.1687,971.2723,490.07014,857.91333,83.036160,344.11179,21.220352,210.33129,0.922624,0.018020,6859.009454,1.3856843445359
-c32,442.9824,1550.099,701.7255,82.574848,333.7788,82.574848,590.3019,490.07014,527.54432,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,5136.441424,1.8503904139842
-c33,442.9824,1550.099,426.5549,82.574848,558.563,544.1687,971.2723,490.07014,857.91333,83.036160,344.11179,21.220352,210.33129,0.922624,0.018020,6583.838854,1.44359882196474
-c34,352.4661,1610.85,426.5549,652.9992,41.518080,82.574848,971.2723,490.07014,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,1.04789,5047.161266,1.8831223075264
-c35,442.9824,1550.099,426.5549,652.9992,41.518080,925.6073,82.574848,41.518080,857.91333,883.36663,208.37216,21.220352,210.33129,0.922624,1.04789,6347.028084,1.49746021011209
-c36,442.9824,1610.85,426.5549,652.9992,333.7788,544.1687,971.2723,41.518080,527.54432,883.36663,21.220352,206.54856,21.220352,0.922624,1.669613,6686.616831,1.42140969881739
-c37,442.9824,1550.099,426.5549,1159.5933,558.563,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,344.22248,21.220352,0.922624,0.018020,5334.454822,1.78170446596193
-c38,352.4661,1550.099,426.5549,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,344.22248,21.220352,0.922624,1.04789,5020.184192,1.89324168300872
-c39,352.4661,1550.099,701.7255,652.9992,41.518080,82.574848,971.2723,490.07014,83.036160,543.5573,21.220352,344.22248,21.220352,12.419968,1.04789,5869.44967,1.61930377300085
-c40,442.9824,1550.099,701.7255,652.9992,333.7788,82.574848,82.574848,490.07014,83.036160,83.036160,208.37216,21.220352,210.33129,0.922624,0.018020,4943.741502,1.92251596526707
-c41,352.4661,1550.099,426.5549,1159.5933,558.563,544.1687,590.3019,41.518080,83.036160,543.5573,208.37216,206.54856,21.220352,0.922624,1.669613,6288.591749,1.51137526273253
-c42,442.9824,1550.099,426.5549,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,83.036160,208.37216,344.22248,210.33129,0.922624,1.04789,5299.81143,1.79335097185994
-c43,352.4661,1550.099,426.5549,1159.5933,41.518080,82.574848,590.3019,290.68457,83.036160,883.36663,208.37216,344.22248,21.220352,12.419968,1.669613,6048.100061,1.57147234751293
-c44,352.4661,1550.099,426.5549,1159.5933,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,21.220352,21.220352,21.220352,0.922624,1.669613,5018.379031,1.8939227009152
-c45,352.4661,1610.85,426.5549,1159.5933,333.7788,82.574848,82.574848,290.68457,83.036160,543.5573,21.220352,21.220352,21.220352,0.922624,1.669613,5031.924119,1.88882458168037
-c46,352.4661,1550.099,426.5549,82.574848,558.563,82.574848,590.3019,490.07014,527.54432,543.5573,208.37216,344.22248,21.220352,0.922624,1.669613,5780.713585,1.64416068255766
-c47,442.9824,1550.099,426.5549,652.9992,558.563,82.574848,82.574848,490.07014,83.036160,543.5573,344.11179,206.54856,210.33129,0.922624,0.018020,5674.94408,1.67480451904639
-c48,352.4661,1550.099,426.5549,1159.5933,558.563,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,0.922624,0.018020,5428.663446,1.75078489898368
-c49,442.9824,1550.099,426.5549,1159.5933,333.7788,82.574848,82.574848,490.07014,857.91333,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5958.411463,1.595126831624
-c50,352.4661,1610.85,426.5549,1159.5933,41.518080,925.6073,590.3019,290.68457,83.036160,83.036160,208.37216,344.22248,21.220352,0.922624,1.669613,6140.055699,1.54793742420841
-c51,352.4661,1610.85,426.5549,1159.5933,558.563,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,344.22248,21.220352,7.640322,1.669613,5820.785865,1.63284171848088
-c16,3959.078244
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,472.88204,1949.97,7209.476,3475.891,272.76268,6530.41,1732.6793,3203.344,3427.0759,1299.926,69.928921,80.061262,69.37799,16.49564,30919.264733,1.89248635629098
-c2,1108.984,472.88204,1949.97,7209.476,3475.891,313.098433,3366.9575,1683.0934,357.330128,277.897139,1299.926,2388.8133,1319.7884,3.135222,10.00372,25237.246282,2.31856859331271
-c3,1108.984,472.88204,1949.97,7209.476,3475.891,272.76268,3366.9575,1683.0934,6117.8413,3521.5484,1299.926,2388.8133,89.882488,3.135222,10.00372,32971.16705,1.77471081253488
-c4,1145.5648,472.88204,1949.97,7209.476,179.180675,352.195669,3366.9575,1683.0934,357.330128,3427.0759,1299.926,89.882488,80.061262,3.135222,10.00372,21626.734804,2.70564591015435
-c5,1145.5648,6358.145,3996.263,375.615141,179.180675,352.195669,6530.41,1732.6793,277.897139,277.897139,80.061262,1273.3007,2365.9264,2.570366,0.543719,24948.25031,2.34542646804386
-c6,1108.984,7471.505,2073.98,7209.476,1802.5061,3053.2545,3366.9575,171.815177,277.897139,318.232892,89.882488,89.882488,89.882488,57.16078,10.00372,27191.420272,2.15193932654045
-c7,1108.984,472.88204,1949.97,7209.476,159.632057,272.76268,6530.41,1732.6793,357.330128,3427.0759,1299.926,2388.8133,80.061262,3.135222,0.543719,26993.681608,2.16770307496285
-c8,1108.984,7471.505,2073.98,375.615141,179.180675,6007.756,6530.41,3324.8076,357.330128,357.330128,1299.926,80.061262,69.928921,69.37799,0.543719,29306.736564,1.99661557388197
-c9,1108.984,472.88204,1949.97,7209.476,179.180675,313.098433,3366.9575,1683.0934,357.330128,3427.0759,1299.926,2388.8133,80.061262,2.570366,10.00372,23849.422724,2.45348859277702
-c10,1108.984,472.88204,1949.97,7209.476,179.180675,313.098433,3366.9575,1683.0934,357.330128,3427.0759,1299.926,2388.8133,80.061262,3.135222,10.00372,23849.98758,2.45343048508443
-c11,1108.984,472.88204,1949.97,7209.476,159.632057,6007.756,3519.4585,1683.0934,3203.344,277.897139,1299.926,2388.8133,80.061262,3.135222,10.00372,29374.43264,1.99201419005853
-c12,1108.984,7471.505,2073.98,7209.476,139.46418,272.76268,3366.9575,171.815177,357.330128,3427.0759,2492.7759,2376.5679,2353.8847,3.135222,10.00372,32835.718007,1.78203158683244
-c13,1108.984,7471.505,2073.98,375.615141,159.632057,6007.756,3519.4585,3383.8134,357.330128,3427.0759,69.928921,2376.5679,80.061262,3.135222,10.00372,30424.847151,1.92324011884979
-c14,1108.984,6756.043,3996.263,3610.731,1731.6018,6130.47,6530.41,132.098682,318.232892,318.232892,1299.926,80.061262,69.928921,2.857198,0.538289,32086.378936,1.82364880678336
-c15,1145.5648,6358.145,3996.263,3610.731,139.46418,6007.756,352.195669,3324.8076,357.330128,3427.0759,89.882488,80.061262,69.928921,3.135222,0.538289,28962.879459,2.0203200694807
-c16,1108.984,6756.043,1949.97,3356.913,139.46418,313.098433,352.195669,1683.0934,357.330128,277.897139,1299.926,80.061262,69.928921,2.570366,1.812953,17749.288451,3.29671167804094
-c17,1145.5648,6358.145,3996.263,3610.731,3586.141,3197.8545,272.76268,132.098682,3203.344,6559.316,80.061262,1273.3007,80.061262,2.570366,10.00372,33508.217972,1.7462667431992
-c18,1108.984,472.88204,1949.97,7209.476,179.180675,272.76268,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,80.061262,3.135222,0.538289,25530.687668,2.29191972322584
-c19,1108.984,6756.043,1949.97,3356.913,3586.141,3197.8545,352.195669,3324.8076,277.897139,3427.0759,1299.926,80.061262,80.061262,57.16078,0.532687,28855.623799,2.02782955058642
-c20,1108.984,6756.043,1949.97,7209.476,179.180675,6007.756,352.195669,3324.8076,357.330128,6493.6098,80.061262,89.882488,80.061262,2.570366,10.00372,34001.93197,1.72091064479913
-c21,1108.984,6756.043,3996.263,3610.731,3586.141,3197.8545,272.76268,171.815177,357.330128,6493.6098,80.061262,1273.3007,2365.9264,2.857198,0.538289,33274.218134,1.75854730625074
-c22,1108.984,7471.505,2073.98,296.182152,159.632057,272.76268,3366.9575,1683.0934,357.330128,3427.0759,1299.926,2388.8133,80.061262,2.570366,10.00372,23998.877465,2.43820931560305
-c23,1145.5648,6358.145,1949.97,7209.476,1802.5061,3053.2545,352.195669,171.815177,3203.344,3427.0759,1299.926,69.928921,80.061262,69.37799,16.49564,30209.136959,1.93697313262271
-c24,1108.984,6756.043,1949.97,7209.476,1802.5061,3053.2545,352.195669,171.815177,277.897139,3427.0759,1299.926,80.061262,80.061262,69.37799,0.532687,27639.176686,2.11707777319327
-c25,1145.5648,6358.145,1949.97,3356.913,159.632057,272.76268,352.195669,1683.0934,357.330128,277.897139,1299.926,1273.3007,1286.6509,57.16078,1.8312957,19832.3735487,2.95044294139928
-c26,1108.984,6756.043,1949.97,3356.913,1731.6018,6130.47,272.76268,152.266559,3203.344,3427.0759,1299.926,2388.8133,80.061262,2.857198,10.00372,31871.092419,1.83596739923856
-c27,1108.984,6756.043,1949.97,3356.913,1731.6018,6130.47,272.76268,152.266559,3203.344,3427.0759,1299.926,2388.8133,89.882488,69.37799,16.49564,31953.926357,1.83120803390913
-c28,1108.984,6756.043,1949.97,7209.476,3475.891,3197.8545,352.195669,171.815177,3203.344,3427.0759,1299.926,80.061262,80.061262,69.37799,0.543719,32382.619479,1.80696582314009
-c29,1108.984,6756.043,1949.97,3356.913,1731.6018,6130.47,272.76268,171.815177,3203.344,3427.0759,1299.926,2388.8133,80.061262,2.857198,10.00372,31890.641037,1.83484197108571
-c30,1108.984,7471.505,3792.343,3610.731,179.180675,352.195669,3366.9575,1683.0934,357.330128,357.330128,89.882488,2376.5679,80.061262,3.135222,0.532687,24829.830059,2.35661244834534
-c31,1145.5648,7908.45,3792.343,375.615141,3475.891,3197.8545,6654.867,3324.8076,6053.9348,318.232892,2479.9481,89.882488,1286.6509,3.135222,0.538289,40107.715732,1.45892842883649
-c32,1145.5648,7908.45,3792.343,375.615141,1731.6018,272.76268,3366.9575,3383.8134,3293.4507,277.897139,1299.926,80.061262,80.061262,3.135222,0.543719,27012.183625,2.16621830499563
-c33,1145.5648,7908.45,2073.98,375.615141,3475.891,3197.8545,6654.867,3324.8076,6053.9348,318.232892,2479.9481,89.882488,1286.6509,3.135222,0.538289,38389.352732,1.52423217705886
-c34,1108.984,6756.043,1949.97,3356.913,159.632057,272.76268,6530.41,3324.8076,357.330128,357.330128,1299.926,89.882488,89.882488,2.857198,1.812953,25658.54372,2.28049912939292
-c35,1145.5648,7908.45,2073.98,3356.913,139.46418,6007.756,272.76268,152.266559,6053.9348,6493.6098,1337.2918,80.061262,1286.6509,2.570366,1.812953,36313.0891,1.61138278599002
-c36,1145.5648,6358.145,1949.97,3356.913,1731.6018,3053.2545,6654.867,132.098682,3203.344,6559.316,80.061262,1273.3007,89.882488,2.857198,10.00372,35601.18015,1.64360525218823
-c37,1145.5648,7908.45,2073.98,7209.476,3475.891,313.098433,313.098433,1683.0934,357.330128,277.897139,1299.926,2388.8133,80.061262,3.135222,0.532687,28530.347804,2.05094894180369
-c38,1108.984,7471.505,2073.98,7209.476,1802.5061,352.195669,352.195669,1683.0934,318.232892,357.330128,1299.926,2388.8133,89.882488,2.570366,1.812953,26512.503965,2.20704489849552
-c39,1108.984,7471.505,3792.343,3610.731,159.632057,272.76268,6530.41,3324.8076,318.232892,3427.0759,80.061262,2376.5679,80.061262,69.37799,1.812953,32624.365496,1.79357623586018
-c40,1145.5648,7908.45,3792.343,3610.731,1731.6018,313.098433,313.098433,3324.8076,357.330128,357.330128,1299.926,69.928921,1286.6509,2.857198,0.543719,25514.26206,2.29339521856665
-c41,1108.984,7471.505,2073.98,7209.476,3475.891,3197.8545,3366.9575,171.815177,357.330128,3427.0759,1299.926,1273.3007,69.928921,3.135222,10.00372,34517.163768,1.6952229061104
-c42,1145.5648,7908.45,2073.98,7209.476,1802.5061,313.098433,313.098433,1683.0934,357.330128,357.330128,1299.926,2388.8133,1319.7884,3.135222,1.812953,28177.403297,2.07663871715127
-c43,1108.984,7471.505,2073.98,7209.476,179.180675,272.76268,3366.9575,1683.0934,357.330128,6493.6098,1337.2918,2388.8133,80.061262,69.37799,16.49564,34108.919175,1.71551277750063
-c44,1108.984,7471.505,2073.98,7209.476,1802.5061,352.195669,3366.9575,1683.0934,318.232892,357.330128,89.882488,89.882488,89.882488,3.135222,10.00372,26027.047095,2.2482107326505
-c45,1108.984,6756.043,1949.97,7209.476,1802.5061,352.195669,313.098433,1683.0934,357.330128,3427.0759,89.882488,80.061262,80.061262,3.135222,10.00372,25222.916584,2.31988582351851
-c46,1108.984,7471.505,2073.98,336.517905,3475.891,272.76268,3366.9575,3383.8134,3293.4507,3427.0759,1299.926,2388.8133,80.061262,3.135222,10.00372,31992.877589,1.82897854365626
-c47,1145.5648,7908.45,2073.98,3356.913,3586.141,352.195669,272.76268,3324.8076,357.330128,3427.0759,2492.7759,1307.6007,1286.6509,2.857198,0.538289,30895.643764,1.89393323863309
-c48,1108.984,7471.505,2073.98,7209.476,3475.891,272.76268,3366.9575,1683.0934,357.330128,318.232892,1299.926,89.882488,80.061262,3.135222,0.538289,28811.755861,2.03091706462479
-c49,1145.5648,7908.45,2073.98,7209.476,1802.5061,313.098433,313.098433,3324.8076,6053.9348,357.330128,1299.926,69.928921,1286.6509,57.16078,10.00372,33225.916615,1.76110375960172
-c50,1108.984,6756.043,1949.97,7209.476,159.632057,6007.756,3519.4585,1683.0934,357.330128,357.330128,1299.926,2388.8133,80.061262,3.135222,10.00372,32891.012717,1.77903572530781
-c51,1108.984,6756.043,1949.97,7209.476,3475.891,272.76268,3366.9575,1683.0934,357.330128,357.330128,1299.926,2388.8133,80.061262,57.16078,10.00372,30373.802898,1.92647219206804
-c16,17749.288451
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,188.762345,0,0,0,151.959867,0,0,0,0,0,38.454652,38.454652,0,0,417.631516,0
-c2,0,188.762345,0,0,0,151.959867,0,0,153.127903,153.127903,0,0,0,1.156348,0,648.134366,0
-c3,0,188.762345,0,0,0,151.959867,0,0,0,0,0,0,38.454652,1.156348,0,380.333212,0
-c4,0,188.762345,0,0,76.708720,151.959867,0,0,153.127903,0,0,38.454652,38.454652,1.156348,0,648.624487,0
-c5,0,0,0,157.595858,76.708720,151.959867,0,0,153.127903,153.127903,38.454652,0,0,1.156348,0.102999,732.23425,0
-c6,0,0,0,0,0,0,0,74.930729,153.127903,153.127903,38.454652,38.454652,38.454652,0,0,496.550491,0
-c7,0,188.762345,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,38.454652,1.156348,0.102999,610.272834,0
-c8,0,0,0,157.595858,76.708720,0,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0.102999,617.572687,0
-c9,0,188.762345,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,38.454652,1.156348,0,610.169835,0
-c10,0,188.762345,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,38.454652,1.156348,0,610.169835,0
-c11,0,188.762345,0,0,76.708720,0,0,0,0,153.127903,0,0,38.454652,1.156348,0,458.209968,0
-c12,0,0,0,0,76.708720,151.959867,0,74.930729,153.127903,0,0,0,0,1.156348,0,457.883567,0
-c13,0,0,0,157.595858,76.708720,0,0,0,153.127903,0,38.454652,0,38.454652,1.156348,0,465.498133,0
-c14,0,0,0,0,0,0,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,459.355186,0
-c15,0,0,0,0,76.708720,0,151.959867,0,153.127903,0,38.454652,38.454652,38.454652,1.156348,0.102999,498.419793,0
-c16,0,0,0,0,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0,764.949912,0
-c17,0,0,0,0,0,0,151.959867,74.930729,0,0,38.454652,0,38.454652,1.156348,0,304.956248,0
-c18,0,188.762345,0,0,76.708720,151.959867,0,0,0,0,0,0,38.454652,1.156348,0.102999,457.144931,0
-c19,0,0,0,0,0,0,151.959867,0,153.127903,0,0,38.454652,38.454652,0,0.102999,382.100073,0
-c20,0,0,0,0,76.708720,0,151.959867,0,153.127903,0,38.454652,38.454652,38.454652,1.156348,0,498.316794,0
-c21,0,0,0,0,0,0,151.959867,74.930729,153.127903,0,38.454652,0,0,1.156348,0.102999,419.732498,0
-c22,0,0,0,157.595858,76.708720,151.959867,0,0,153.127903,0,0,0,38.454652,1.156348,0,579.003348,0
-c23,0,0,0,0,0,0,151.959867,74.930729,0,0,0,38.454652,38.454652,0,0,303.7999,0
-c24,0,0,0,0,0,0,151.959867,74.930729,153.127903,0,0,38.454652,38.454652,0,0.102999,457.030802,0
-c25,0,0,0,0,76.708720,151.959867,151.959867,0,153.127903,153.127903,0,0,0,0,0,686.88426,0
-c26,0,0,0,0,0,0,151.959867,74.930729,0,0,0,0,38.454652,1.156348,0,266.501596,0
-c27,0,0,0,0,0,0,151.959867,74.930729,0,0,0,0,38.454652,0,0,265.345248,0
-c28,0,0,0,0,0,0,151.959867,74.930729,0,0,0,38.454652,38.454652,0,0.102999,303.902899,0
-c29,0,0,0,0,0,0,151.959867,74.930729,0,0,0,0,38.454652,1.156348,0,266.501596,0
-c30,0,0,0,0,76.708720,151.959867,0,0,153.127903,153.127903,38.454652,0,38.454652,1.156348,0.102999,613.093044,0
-c31,0,0,0,157.595858,0,0,0,0,0,153.127903,0,38.454652,0,1.156348,0.102999,350.43776,0
-c32,0,0,0,157.595858,0,151.959867,0,0,0,153.127903,0,38.454652,38.454652,1.156348,0.102999,540.852279,0
-c33,0,0,0,157.595858,0,0,0,0,0,153.127903,0,38.454652,0,1.156348,0.102999,350.43776,0
-c34,0,0,0,0,76.708720,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0,612.990045,0
-c35,0,0,0,0,76.708720,0,151.959867,74.930729,0,0,0,38.454652,0,1.156348,0,343.210316,0
-c36,0,0,0,0,0,0,0,74.930729,0,0,38.454652,0,38.454652,1.156348,0,152.996381,0
-c37,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,1.156348,0.102999,649.889539,0
-c38,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,38.454652,1.156348,0,649.78654,0
-c39,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,38.454652,0,38.454652,0,0,458.705794,0
-c40,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,38.454652,0,1.156348,0.102999,649.889539,0
-c41,0,0,0,0,0,0,0,74.930729,153.127903,0,0,0,38.454652,1.156348,0,267.669632,0
-c42,0,0,0,0,0,151.959867,151.959867,0,153.127903,153.127903,0,0,0,1.156348,0,611.331888,0
-c43,0,0,0,0,76.708720,151.959867,0,0,153.127903,0,0,0,38.454652,0,0,420.251142,0
-c44,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,38.454652,38.454652,38.454652,1.156348,0,574.735977,0
-c45,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,38.454652,38.454652,38.454652,1.156348,0,573.567941,0
-c46,0,0,0,157.595858,0,151.959867,0,0,0,0,0,0,38.454652,1.156348,0,349.166725,0
-c47,0,0,0,0,0,151.959867,151.959867,0,153.127903,0,0,0,0,1.156348,0.102999,458.306984,0
-c48,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,1.156348,0.102999,536.384324,0
-c49,0,0,0,0,0,151.959867,151.959867,0,0,153.127903,0,38.454652,0,0,0,495.502289,0
-c50,0,0,0,0,76.708720,0,0,0,153.127903,153.127903,0,0,38.454652,1.156348,0,422.575526,0
-c51,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,0,38.454652,0,0,496.670325,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.549158,0,0,0,43.122319,0,0,0,0,0,11.960929,11.960929,0,0,176.593335,0
-c2,0,109.549158,0,0,0,43.122319,0,0,47.088742,47.088742,0,0,0,0.861623,0,247.710584,0
-c3,0,109.549158,0,0,0,43.122319,0,0,0,0,0,0,11.960929,0.861623,0,165.494029,0
-c4,0,109.549158,0,0,23.915213,43.122319,0,0,47.088742,0,0,11.960929,11.960929,0.861623,0,248.458913,0
-c5,0,0,0,60.905800,23.915213,43.122319,0,0,47.088742,47.088742,11.960929,0,0,0.861623,0.418899,235.362267,0
-c6,0,0,0,0,0,0,0,18.327706,47.088742,47.088742,11.960929,11.960929,11.960929,0,0,148.387977,0
-c7,0,109.549158,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,11.960929,0.861623,0.418899,236.916883,0
-c8,0,0,0,60.905800,23.915213,0,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0.418899,203.339254,0
-c9,0,109.549158,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,11.960929,0.861623,0,236.497984,0
-c10,0,109.549158,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,11.960929,0.861623,0,236.497984,0
-c11,0,109.549158,0,0,23.915213,0,0,0,0,47.088742,0,0,11.960929,0.861623,0,193.375665,0
-c12,0,0,0,0,23.915213,43.122319,0,18.327706,47.088742,0,0,0,0,0.861623,0,133.315603,0
-c13,0,0,0,60.905800,23.915213,0,0,0,47.088742,0,11.960929,0,11.960929,0.861623,0,156.693236,0
-c14,0,0,0,0,0,0,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,137.70757,0
-c15,0,0,0,0,23.915213,0,43.122319,0,47.088742,0,11.960929,11.960929,11.960929,0.861623,0.418899,151.289583,0
-c16,0,0,0,0,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0,229.120816,0
-c17,0,0,0,0,0,0,43.122319,18.327706,0,0,11.960929,0,11.960929,0.861623,0,86.233506,0
-c18,0,109.549158,0,0,23.915213,43.122319,0,0,0,0,0,0,11.960929,0.861623,0.418899,189.828141,0
-c19,0,0,0,0,0,0,43.122319,0,47.088742,0,0,11.960929,11.960929,0,0.418899,114.551818,0
-c20,0,0,0,0,23.915213,0,43.122319,0,47.088742,0,11.960929,11.960929,11.960929,0.861623,0,150.870684,0
-c21,0,0,0,0,0,0,43.122319,18.327706,47.088742,0,11.960929,0,0,0.861623,0.418899,121.780218,0
-c22,0,0,0,60.905800,23.915213,43.122319,0,0,47.088742,0,0,0,11.960929,0.861623,0,187.854626,0
-c23,0,0,0,0,0,0,43.122319,18.327706,0,0,0,11.960929,11.960929,0,0,85.371883,0
-c24,0,0,0,0,0,0,43.122319,18.327706,47.088742,0,0,11.960929,11.960929,0,0.418899,132.879524,0
-c25,0,0,0,0,23.915213,43.122319,43.122319,0,47.088742,47.088742,0,0,0,0,0,204.337335,0
-c26,0,0,0,0,0,0,43.122319,18.327706,0,0,0,0,11.960929,0.861623,0,74.272577,0
-c27,0,0,0,0,0,0,43.122319,18.327706,0,0,0,0,11.960929,0,0,73.410954,0
-c28,0,0,0,0,0,0,43.122319,18.327706,0,0,0,11.960929,11.960929,0,0.418899,85.790782,0
-c29,0,0,0,0,0,0,43.122319,18.327706,0,0,0,0,11.960929,0.861623,0,74.272577,0
-c30,0,0,0,0,23.915213,43.122319,0,0,47.088742,47.088742,11.960929,0,11.960929,0.861623,0.418899,186.417396,0
-c31,0,0,0,60.905800,0,0,0,0,0,47.088742,0,11.960929,0,0.861623,0.418899,121.235993,0
-c32,0,0,0,60.905800,0,43.122319,0,0,0,47.088742,0,11.960929,11.960929,0.861623,0.418899,176.319241,0
-c33,0,0,0,60.905800,0,0,0,0,0,47.088742,0,11.960929,0,0.861623,0.418899,121.235993,0
-c34,0,0,0,0,23.915213,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0,185.998497,0
-c35,0,0,0,0,23.915213,0,43.122319,18.327706,0,0,0,11.960929,0,0.861623,0,98.18779,0
-c36,0,0,0,0,0,0,0,18.327706,0,0,11.960929,0,11.960929,0.861623,0,43.111187,0
-c37,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0.861623,0.418899,193.663573,0
-c38,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,11.960929,0.861623,0,193.244674,0
-c39,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,11.960929,0,11.960929,0,0,138.048132,0
-c40,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,11.960929,0,0.861623,0.418899,193.663573,0
-c41,0,0,0,0,0,0,0,18.327706,47.088742,0,0,0,11.960929,0.861623,0,78.239,0
-c42,0,0,0,0,0,43.122319,43.122319,0,47.088742,47.088742,0,0,0,0.861623,0,181.283745,0
-c43,0,0,0,0,23.915213,43.122319,0,0,47.088742,0,0,0,11.960929,0,0,126.087203,0
-c44,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,11.960929,11.960929,11.960929,0.861623,0,174.044213,0
-c45,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,11.960929,11.960929,11.960929,0.861623,0,170.07779,0
-c46,0,0,0,60.905800,0,43.122319,0,0,0,0,0,0,11.960929,0.861623,0,116.850671,0
-c47,0,0,0,0,0,43.122319,43.122319,0,47.088742,0,0,0,0,0.861623,0.418899,134.613902,0
-c48,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0.861623,0.418899,162.502183,0
-c49,0,0,0,0,0,43.122319,43.122319,0,0,47.088742,0,11.960929,0,0,0,145.294309,0
-c50,0,0,0,0,23.915213,0,0,0,47.088742,47.088742,0,0,11.960929,0.861623,0,130.915249,0
-c51,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,0,11.960929,0,0,149.260732,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,32.936441,0,0,0,18.163317,0,0,0,0,0,4.848960,4.848960,0,0,60.797678,0
-c2,0,32.936441,0,0,0,18.163317,0,0,19.033555,19.033555,0,0,0,0.281905,0,89.448773,0
-c3,0,32.936441,0,0,0,18.163317,0,0,0,0,0,0,4.848960,0.281905,0,56.230623,0
-c4,0,32.936441,0,0,9.597736,18.163317,0,0,19.033555,0,0,4.848960,4.848960,0.281905,0,89.710874,0
-c5,0,0,0,21.882908,9.597736,18.163317,0,0,19.033555,19.033555,4.848960,0,0,0.281905,0.089271,92.931207,0
-c6,0,0,0,0,0,0,0,8.429972,19.033555,19.033555,4.848960,4.848960,4.848960,0,0,61.043962,0
-c7,0,32.936441,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,4.848960,0.281905,0.089271,84.951185,0
-c8,0,0,0,21.882908,9.597736,0,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0.089271,79.334945,0
-c9,0,32.936441,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,4.848960,0.281905,0,84.861914,0
-c10,0,32.936441,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,4.848960,0.281905,0,84.861914,0
-c11,0,32.936441,0,0,9.597736,0,0,0,0,19.033555,0,0,4.848960,0.281905,0,66.698597,0
-c12,0,0,0,0,9.597736,18.163317,0,8.429972,19.033555,0,0,0,0,0.281905,0,55.506485,0
-c13,0,0,0,21.882908,9.597736,0,0,0,19.033555,0,4.848960,0,4.848960,0.281905,0,60.494024,0
-c14,0,0,0,0,0,0,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,56.566178,0
-c15,0,0,0,0,9.597736,0,18.163317,0,19.033555,0,4.848960,4.848960,4.848960,0.281905,0.089271,61.712664,0
-c16,0,0,0,0,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0,93.971305,0
-c17,0,0,0,0,0,0,18.163317,8.429972,0,0,4.848960,0,4.848960,0.281905,0,36.573114,0
-c18,0,32.936441,0,0,9.597736,18.163317,0,0,0,0,0,0,4.848960,0.281905,0.089271,65.91763,0
-c19,0,0,0,0,0,0,18.163317,0,19.033555,0,0,4.848960,4.848960,0,0.089271,46.984063,0
-c20,0,0,0,0,9.597736,0,18.163317,0,19.033555,0,4.848960,4.848960,4.848960,0.281905,0,61.623393,0
-c21,0,0,0,0,0,0,18.163317,8.429972,19.033555,0,4.848960,0,0,0.281905,0.089271,50.84698,0
-c22,0,0,0,21.882908,9.597736,18.163317,0,0,19.033555,0,0,0,4.848960,0.281905,0,73.808381,0
-c23,0,0,0,0,0,0,18.163317,8.429972,0,0,0,4.848960,4.848960,0,0,36.291209,0
-c24,0,0,0,0,0,0,18.163317,8.429972,19.033555,0,0,4.848960,4.848960,0,0.089271,55.414035,0
-c25,0,0,0,0,9.597736,18.163317,18.163317,0,19.033555,19.033555,0,0,0,0,0,83.99148,0
-c26,0,0,0,0,0,0,18.163317,8.429972,0,0,0,0,4.848960,0.281905,0,31.724154,0
-c27,0,0,0,0,0,0,18.163317,8.429972,0,0,0,0,4.848960,0,0,31.442249,0
-c28,0,0,0,0,0,0,18.163317,8.429972,0,0,0,4.848960,4.848960,0,0.089271,36.38048,0
-c29,0,0,0,0,0,0,18.163317,8.429972,0,0,0,0,4.848960,0.281905,0,31.724154,0
-c30,0,0,0,0,9.597736,18.163317,0,0,19.033555,19.033555,4.848960,0,4.848960,0.281905,0.089271,75.897259,0
-c31,0,0,0,21.882908,0,0,0,0,0,19.033555,0,4.848960,0,0.281905,0.089271,46.136599,0
-c32,0,0,0,21.882908,0,18.163317,0,0,0,19.033555,0,4.848960,4.848960,0.281905,0.089271,69.148876,0
-c33,0,0,0,21.882908,0,0,0,0,0,19.033555,0,4.848960,0,0.281905,0.089271,46.136599,0
-c34,0,0,0,0,9.597736,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0,75.807988,0
-c35,0,0,0,0,9.597736,0,18.163317,8.429972,0,0,0,4.848960,0,0.281905,0,41.32189,0
-c36,0,0,0,0,0,0,0,8.429972,0,0,4.848960,0,4.848960,0.281905,0,18.409797,0
-c37,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0.281905,0.089271,79.61388,0
-c38,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,4.848960,0.281905,0,79.524609,0
-c39,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,4.848960,0,4.848960,0,0,56.492528,0
-c40,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,4.848960,0,0.281905,0.089271,79.61388,0
-c41,0,0,0,0,0,0,0,8.429972,19.033555,0,0,0,4.848960,0.281905,0,32.594392,0
-c42,0,0,0,0,0,18.163317,18.163317,0,19.033555,19.033555,0,0,0,0.281905,0,74.675649,0
-c43,0,0,0,0,9.597736,18.163317,0,0,19.033555,0,0,0,4.848960,0,0,51.643568,0
-c44,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,4.848960,4.848960,4.848960,0.281905,0,71.059212,0
-c45,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,4.848960,4.848960,4.848960,0.281905,0,70.188974,0
-c46,0,0,0,21.882908,0,18.163317,0,0,0,0,0,0,4.848960,0.281905,0,45.17709,0
-c47,0,0,0,0,0,18.163317,18.163317,0,19.033555,0,0,0,0,0.281905,0.089271,55.731365,0
-c48,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0.281905,0.089271,66.299523,0
-c49,0,0,0,0,0,18.163317,18.163317,0,0,19.033555,0,4.848960,0,0,0,60.209149,0
-c50,0,0,0,0,9.597736,0,0,0,19.033555,19.033555,0,0,4.848960,0.281905,0,52.795711,0
-c51,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,0,4.848960,0,0,61.079387,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,236.022,0,0,0,49.5859,0,0,0,0,0,0,6.49192,292.09982,0
-c2,0,0,0,236.022,0,0,0,0,0,0,0,12.2454,33.1375,0,0,281.4049,0
-c3,0,0,0,236.022,0,0,0,0,63.9065,94.4725,0,12.2454,0,0,0,406.6464,0
-c4,16.0868,0,0,236.022,0,0,0,0,0,0,0,0,0,0,0,252.1088,0
-c5,16.0868,0,203.92,0,0,0,0,49.5859,0,0,0,0,12.0417,0,0,281.6344,0
-c6,0,0,124.01,236.022,70.9043,0,0,0,0,0,0,0,0,0,0,430.9363,0
-c7,0,0,0,236.022,0,0,0,49.5859,0,0,0,12.2454,0,0,0,297.8533,0
-c8,0,0,124.01,0,0,0,0,0,0,0,0,0,0,0,0,124.01,0
-c9,0,0,0,236.022,0,0,0,0,0,0,0,12.2454,0,0,0,248.2674,0
-c10,0,0,0,236.022,0,0,0,0,0,0,0,12.2454,0,0,0,248.2674,0
-c11,0,0,0,236.022,0,0,152.501,0,0,0,0,12.2454,0,0,0,400.7684,0
-c12,0,0,124.01,236.022,0,0,0,0,0,0,12.8278,0,0,0,0,372.8598,0
-c13,0,0,124.01,0,0,0,152.501,59.0058,0,0,0,0,0,0,0,335.5168,0
-c14,0,397.898,203.92,253.818,0,122.714,0,0,0,0,0,0,0,0,0,978.35,0
-c15,16.0868,0,203.92,253.818,0,0,0,0,0,0,0,0,0,0,0,473.8248,0
-c16,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c17,16.0868,0,203.92,253.818,110.25,144.6,0,0,0,65.7062,0,0,0,0,0,794.381,0
-c18,0,0,0,236.022,0,0,0,0,0,0,0,0,0,0,0,236.022,0
-c19,0,397.898,0,0,110.25,144.6,0,0,0,0,0,0,0,0,0,652.748,0
-c20,0,397.898,0,236.022,0,0,0,0,0,0,0,0,0,0,0,633.92,0
-c21,0,397.898,203.92,253.818,110.25,144.6,0,0,0,0,0,0,12.0417,0,0,1122.5277,0
-c22,0,0,124.01,0,0,0,0,0,0,0,0,12.2454,0,0,0,136.2554,0
-c23,16.0868,0,0,236.022,70.9043,0,0,0,0,0,0,0,0,0,6.49192,329.50502,0
-c24,0,397.898,0,236.022,70.9043,0,0,0,0,0,0,0,0,0,0,704.8243,0
-c25,16.0868,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0183427,16.1051427,0
-c26,0,397.898,0,0,0,122.714,0,0,0,0,0,12.2454,0,0,0,532.8574,0
-c27,0,397.898,0,0,0,122.714,0,0,0,0,0,12.2454,0,0,6.49192,539.34932,0
-c28,0,397.898,0,236.022,0,144.6,0,0,0,0,0,0,0,0,0,778.52,0
-c29,0,397.898,0,0,0,122.714,0,0,0,0,0,12.2454,0,0,0,532.8574,0
-c30,0,0,0,253.818,0,0,0,0,0,0,0,0,0,0,0,253.818,0
-c31,16.0868,436.945,0,0,0,144.6,124.457,0,0,0,0,0,0,0,0,722.0888,0
-c32,16.0868,436.945,0,0,0,0,0,59.0058,90.1067,0,0,0,0,0,0,602.1443,0
-c33,16.0868,436.945,124.01,0,0,144.6,124.457,0,0,0,0,0,0,0,0,846.0988,0
-c34,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c35,16.0868,436.945,124.01,0,0,0,0,0,0,0,37.3658,0,0,0,0,614.4076,0
-c36,16.0868,0,0,0,0,0,124.457,0,0,65.7062,0,0,0,0,0,206.25,0
-c37,16.0868,436.945,124.01,236.022,0,0,0,0,0,0,0,12.2454,0,0,0,825.3092,0
-c38,0,0,124.01,236.022,70.9043,0,0,0,0,0,0,12.2454,0,0,0,443.1817,0
-c39,0,0,0,253.818,0,0,0,0,0,0,0,0,0,0,0,253.818,0
-c40,16.0868,436.945,0,253.818,0,0,0,0,0,0,0,0,0,0,0,706.8498,0
-c41,0,0,124.01,236.022,0,144.6,0,0,0,0,0,0,0,0,0,504.632,0
-c42,16.0868,436.945,124.01,236.022,70.9043,0,0,0,0,0,0,12.2454,33.1375,0,0,929.351,0
-c43,0,0,124.01,236.022,0,0,0,0,0,0,37.3658,12.2454,0,0,6.49192,416.13512,0
-c44,0,0,124.01,236.022,70.9043,0,0,0,0,0,0,0,0,0,0,430.9363,0
-c45,0,397.898,0,236.022,70.9043,0,0,0,0,0,0,0,0,0,0,704.8243,0
-c46,0,0,124.01,0,0,0,0,59.0058,90.1067,0,0,12.2454,0,0,0,285.3679,0
-c47,16.0868,436.945,124.01,0,110.25,0,0,0,0,0,12.8278,34.3,0,0,0,734.4196,0
-c48,0,0,124.01,236.022,0,0,0,0,0,0,0,0,0,0,0,360.032,0
-c49,16.0868,436.945,124.01,236.022,70.9043,0,0,0,0,0,0,0,0,0,0,883.9681,0
-c50,0,397.898,0,236.022,0,0,152.501,0,0,0,0,12.2454,0,0,0,798.6664,0
-c51,0,397.898,0,236.022,0,0,0,0,0,0,0,12.2454,0,0,0,646.1654,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,41.5238,0,0,0,9.68469,0,0,0,0,0,0,1.76803,52.97652,0
-c2,0,0,0,41.5238,0,0,0,0,0,0,0,2.76929,6.20782,0,0,50.50091,0
-c3,0,0,0,41.5238,0,0,0,0,10.5851,16.267,0,2.76929,0,0,0,71.14519,0
-c4,7.62006,0,0,41.5238,0,0,0,0,0,0,0,0,0,0,0,49.14386,0
-c5,7.62006,0,41.6322,0,0,0,0,9.68469,0,0,0,0,2.76826,0,0,61.70521,0
-c6,0,0,28.5399,41.5238,14.8008,0,0,0,0,0,0,0,0,0,0,84.8645,0
-c7,0,0,0,41.5238,0,0,0,9.68469,0,0,0,2.76929,0,0,0,53.97778,0
-c8,0,0,28.5399,0,0,0,0,0,0,0,0,0,0,0,0,28.5399,0
-c9,0,0,0,41.5238,0,0,0,0,0,0,0,2.76929,0,0,0,44.29309,0
-c10,0,0,0,41.5238,0,0,0,0,0,0,0,2.76929,0,0,0,44.29309,0
-c11,0,0,0,41.5238,0,0,28.2053,0,0,0,0,2.76929,0,0,0,72.49839,0
-c12,0,0,28.5399,41.5238,0,0,0,0,0,0,2.77964,0,0,0,0,72.84334,0
-c13,0,0,28.5399,0,0,0,28.2053,10.5597,0,0,0,0,0,0,0,67.3049,0
-c14,0,109.635,41.6322,52.8973,0,20.8654,0,0,0,0,0,0,0,0,0,225.0299,0
-c15,7.62006,0,41.6322,52.8973,0,0,0,0,0,0,0,0,0,0,0,102.14956,0
-c16,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c17,7.62006,0,41.6322,52.8973,20.8418,28.103,0,0,0,10.5438,0,0,0,0,0,161.63816,0
-c18,0,0,0,41.5238,0,0,0,0,0,0,0,0,0,0,0,41.5238,0
-c19,0,109.635,0,0,20.8418,28.103,0,0,0,0,0,0,0,0,0,158.5798,0
-c20,0,109.635,0,41.5238,0,0,0,0,0,0,0,0,0,0,0,151.1588,0
-c21,0,109.635,41.6322,52.8973,20.8418,28.103,0,0,0,0,0,0,2.76826,0,0,255.87756,0
-c22,0,0,28.5399,0,0,0,0,0,0,0,0,2.76929,0,0,0,31.30919,0
-c23,7.62006,0,0,41.5238,14.8008,0,0,0,0,0,0,0,0,0,1.76803,65.71269,0
-c24,0,109.635,0,41.5238,14.8008,0,0,0,0,0,0,0,0,0,0,165.9596,0
-c25,7.62006,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133188,7.753248,0
-c26,0,109.635,0,0,0,20.8654,0,0,0,0,0,2.76929,0,0,0,133.26969,0
-c27,0,109.635,0,0,0,20.8654,0,0,0,0,0,2.76929,0,0,1.76803,135.03772,0
-c28,0,109.635,0,41.5238,0,28.103,0,0,0,0,0,0,0,0,0,179.2618,0
-c29,0,109.635,0,0,0,20.8654,0,0,0,0,0,2.76929,0,0,0,133.26969,0
-c30,0,0,0,52.8973,0,0,0,0,0,0,0,0,0,0,0,52.8973,0
-c31,7.62006,83.3307,0,0,0,28.103,20.8436,0,0,0,0,0,0,0,0,139.89736,0
-c32,7.62006,83.3307,0,0,0,0,0,10.5597,16.2347,0,0,0,0,0,0,117.74516,0
-c33,7.62006,83.3307,28.5399,0,0,28.103,20.8436,0,0,0,0,0,0,0,0,168.43726,0
-c34,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c35,7.62006,83.3307,28.5399,0,0,0,0,0,0,0,6.79928,0,0,0,0,126.28994,0
-c36,7.62006,0,0,0,0,0,20.8436,0,0,10.5438,0,0,0,0,0,39.00746,0
-c37,7.62006,83.3307,28.5399,41.5238,0,0,0,0,0,0,0,2.76929,0,0,0,163.78375,0
-c38,0,0,28.5399,41.5238,14.8008,0,0,0,0,0,0,2.76929,0,0,0,87.63379,0
-c39,0,0,0,52.8973,0,0,0,0,0,0,0,0,0,0,0,52.8973,0
-c40,7.62006,83.3307,0,52.8973,0,0,0,0,0,0,0,0,0,0,0,143.84806,0
-c41,0,0,28.5399,41.5238,0,28.103,0,0,0,0,0,0,0,0,0,98.1667,0
-c42,7.62006,83.3307,28.5399,41.5238,14.8008,0,0,0,0,0,0,2.76929,6.20782,0,0,184.79237,0
-c43,0,0,28.5399,41.5238,0,0,0,0,0,0,6.79928,2.76929,0,0,1.76803,81.4003,0
-c44,0,0,28.5399,41.5238,14.8008,0,0,0,0,0,0,0,0,0,0,84.8645,0
-c45,0,109.635,0,41.5238,14.8008,0,0,0,0,0,0,0,0,0,0,165.9596,0
-c46,0,0,28.5399,0,0,0,0,10.5597,16.2347,0,0,2.76929,0,0,0,58.10359,0
-c47,7.62006,83.3307,28.5399,0,20.8418,0,0,0,0,0,2.77964,6.35668,0,0,0,149.46878,0
-c48,0,0,28.5399,41.5238,0,0,0,0,0,0,0,0,0,0,0,70.0637,0
-c49,7.62006,83.3307,28.5399,41.5238,14.8008,0,0,0,0,0,0,0,0,0,0,175.81526,0
-c50,0,109.635,0,41.5238,0,0,28.2053,0,0,0,0,2.76929,0,0,0,182.13339,0
-c51,0,109.635,0,41.5238,0,0,0,0,0,0,0,2.76929,0,0,0,153.92809,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,123.468921,426.5549,1201.1171,558.563,100.738165,971.2723,300.36926,527.54432,543.5573,208.37216,26.069312,26.069312,12.419968,3.437643,5382.019761,1.765958209644
-c2,352.4661,123.468921,426.5549,1201.1171,558.563,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,346.99177,216.53911,1.204529,1.669613,4622.811268,2.05598312399062
-c3,352.4661,123.468921,426.5549,1201.1171,558.563,100.738165,590.3019,290.68457,868.49843,559.8243,208.37216,346.99177,26.069312,1.204529,1.669613,5656.52477,1.68025817554657
-c4,450.60246,123.468921,426.5549,1201.1171,51.115816,100.738165,590.3019,290.68457,102.069715,543.5573,208.37216,26.069312,26.069312,1.204529,1.669613,4143.595773,2.29376185547716
-c5,450.60246,1610.85,743.3577,104.457756,51.115816,100.738165,971.2723,300.36926,102.069715,102.069715,26.069312,206.54856,354.71169,1.204529,0.107291,5125.544269,1.85432443341707
-c6,352.4661,1550.099,455.0948,1201.1171,348.5796,544.1687,590.3019,49.948052,102.069715,102.069715,26.069312,26.069312,26.069312,7.640322,1.669613,5383.432553,1.76549476340222
-c7,352.4661,123.468921,426.5549,1201.1171,51.115816,100.738165,971.2723,300.36926,102.069715,543.5573,208.37216,346.99177,26.069312,1.204529,0.107291,4755.474639,1.99862740938429
-c8,352.4661,1550.099,455.0948,104.457756,51.115816,925.6073,971.2723,490.07014,102.069715,102.069715,208.37216,26.069312,26.069312,12.419968,0.107291,5377.360685,1.76748827873189
-c9,352.4661,123.468921,426.5549,1201.1171,51.115816,100.738165,590.3019,290.68457,102.069715,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,4366.381871,2.17672714414934
-c10,352.4661,123.468921,426.5549,1201.1171,51.115816,100.738165,590.3019,290.68457,102.069715,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,4366.381871,2.17672714414934
-c11,352.4661,123.468921,426.5549,1201.1171,51.115816,925.6073,618.5072,290.68457,527.54432,102.069715,208.37216,346.99177,26.069312,1.204529,1.669613,5203.443326,1.82656394619558
-c12,352.4661,1550.099,455.0948,1201.1171,51.115816,100.738165,590.3019,49.948052,102.069715,543.5573,346.89143,344.22248,351.94343,1.204529,1.669613,6042.43943,1.57294452196198
-c13,352.4661,1550.099,455.0948,104.457756,51.115816,925.6073,618.5072,500.62984,102.069715,543.5573,26.069312,344.22248,26.069312,1.204529,1.669613,5602.840073,1.6963578943054
-c14,352.4661,1720.485,743.3577,705.8965,333.7788,946.4727,971.2723,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,6289.639186,1.51112356779438
-c15,450.60246,1610.85,743.3577,705.8965,51.115816,925.6073,100.738165,490.07014,102.069715,543.5573,26.069312,26.069312,26.069312,1.204529,0.107291,5803.384852,1.63773767148163
-c16,352.4661,1720.485,426.5549,652.9992,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,1.04789,4162.684549,2.28324337763209
-c17,450.60246,1610.85,743.3577,705.8965,579.4048,572.2717,100.738165,49.948052,527.54432,893.91043,26.069312,206.54856,26.069312,1.204529,1.669613,6496.085453,1.46309990539005
-c18,352.4661,123.468921,426.5549,1201.1171,51.115816,100.738165,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,26.069312,1.204529,0.107291,4649.850944,2.04402723185384
-c19,352.4661,1720.485,426.5549,652.9992,579.4048,572.2717,100.738165,490.07014,102.069715,543.5573,208.37216,26.069312,26.069312,7.640322,0.107291,5808.875417,1.63618967736265
-c20,352.4661,1720.485,426.5549,1201.1171,51.115816,925.6073,100.738165,490.07014,102.069715,883.36663,26.069312,26.069312,26.069312,1.204529,1.669613,6334.672944,1.50038085501544
-c21,352.4661,1720.485,743.3577,705.8965,579.4048,572.2717,100.738165,49.948052,102.069715,883.36663,26.069312,206.54856,354.71169,1.204529,0.107291,6398.645744,1.4853802491526
-c22,352.4661,1550.099,455.0948,104.457756,51.115816,100.738165,590.3019,290.68457,102.069715,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,4724.892506,2.01156363764345
-c23,450.60246,1610.85,426.5549,1201.1171,348.5796,544.1687,100.738165,49.948052,527.54432,543.5573,208.37216,26.069312,26.069312,12.419968,3.437643,6080.028992,1.56321984881713
-c24,352.4661,1720.485,426.5549,1201.1171,348.5796,544.1687,100.738165,49.948052,102.069715,543.5573,208.37216,26.069312,26.069312,12.419968,0.107291,5662.722675,1.67841911667661
-c25,450.60246,1610.85,426.5549,652.9992,51.115816,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,206.54856,210.33129,7.640322,1.181078,4522.496116,2.10158764188118
-c26,352.4661,1720.485,426.5549,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,6238.851921,1.52342484258451
-c27,352.4661,1720.485,426.5549,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,543.5573,208.37216,346.99177,26.069312,12.419968,3.437643,6251.83539,1.5202610774392
-c28,352.4661,1720.485,426.5549,1201.1171,558.563,572.2717,100.738165,49.948052,527.54432,543.5573,208.37216,26.069312,26.069312,12.419968,0.107291,6326.28368,1.50237050510561
-c29,352.4661,1720.485,426.5549,652.9992,333.7788,946.4727,100.738165,49.948052,527.54432,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,6238.851921,1.52342484258451
-c30,352.4661,1550.099,701.7255,705.8965,51.115816,100.738165,590.3019,290.68457,102.069715,102.069715,26.069312,344.22248,26.069312,1.204529,0.107291,4944.839905,1.9220889145836
-c31,450.60246,1633.4297,701.7255,104.457756,558.563,572.2717,992.1159,490.07014,857.91333,102.069715,344.11179,26.069312,210.33129,1.204529,0.107291,7045.043413,1.34909346414423
-c32,450.60246,1633.4297,701.7255,104.457756,333.7788,100.738165,590.3019,500.62984,543.77902,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,5323.33546,1.78542608311545
-c33,450.60246,1633.4297,455.0948,104.457756,558.563,572.2717,992.1159,490.07014,857.91333,102.069715,344.11179,26.069312,210.33129,1.204529,0.107291,6798.412713,1.39803545613258
-c34,352.4661,1720.485,426.5549,652.9992,51.115816,100.738165,971.2723,490.07014,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,1.04789,5232.604254,1.81638463659773
-c35,450.60246,1633.4297,455.0948,652.9992,51.115816,925.6073,100.738165,49.948052,857.91333,883.36663,215.17144,26.069312,210.33129,1.204529,1.04789,6514.639914,1.45893282477235
-c36,450.60246,1610.85,426.5549,652.9992,333.7788,544.1687,992.1159,49.948052,527.54432,893.91043,26.069312,206.54856,26.069312,1.204529,1.669613,6744.034088,1.40930812226778
-c37,450.60246,1633.4297,455.0948,1201.1171,558.563,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,346.99177,26.069312,1.204529,0.107291,5577.852452,1.70395722536482
-c38,352.4661,1550.099,455.0948,1201.1171,348.5796,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,346.99177,26.069312,1.204529,1.04789,5187.342591,1.83223332718891
-c39,352.4661,1550.099,701.7255,705.8965,51.115816,100.738165,971.2723,490.07014,102.069715,543.5573,26.069312,344.22248,26.069312,12.419968,1.04789,5978.839498,1.5896767260957
-c40,450.60246,1633.4297,701.7255,705.8965,333.7788,100.738165,100.738165,490.07014,102.069715,102.069715,208.37216,26.069312,210.33129,1.204529,0.107291,5167.203442,1.83937444707689
-c41,352.4661,1550.099,455.0948,1201.1171,558.563,572.2717,590.3019,49.948052,102.069715,543.5573,208.37216,206.54856,26.069312,1.204529,1.669613,6419.352841,1.48058881406814
-c42,450.60246,1633.4297,455.0948,1201.1171,348.5796,100.738165,100.738165,290.68457,102.069715,102.069715,208.37216,346.99177,216.53911,1.204529,1.04789,5559.279449,1.70964997788421
-c43,352.4661,1550.099,455.0948,1201.1171,51.115816,100.738165,590.3019,290.68457,102.069715,883.36663,215.17144,346.99177,26.069312,12.419968,3.437643,6181.143929,1.53764774181094
-c44,352.4661,1550.099,455.0948,1201.1171,348.5796,100.738165,590.3019,290.68457,102.069715,102.069715,26.069312,26.069312,26.069312,1.204529,1.669613,5174.302743,1.83685076934721
-c45,352.4661,1720.485,426.5549,1201.1171,348.5796,100.738165,100.738165,290.68457,102.069715,543.5573,26.069312,26.069312,26.069312,1.204529,1.669613,5268.072693,1.80415543434196
-c46,352.4661,1550.099,455.0948,104.457756,558.563,100.738165,590.3019,500.62984,543.77902,543.5573,208.37216,346.99177,26.069312,1.204529,1.669613,5883.994265,1.61530102994924
-c47,450.60246,1633.4297,455.0948,652.9992,579.4048,100.738165,100.738165,490.07014,102.069715,543.5573,346.89143,212.90524,210.33129,1.204529,0.107291,5880.144225,1.61635865255739
-c48,352.4661,1550.099,455.0948,1201.1171,558.563,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,1.204529,0.107291,5565.026669,1.70788435573112
-c49,450.60246,1633.4297,455.0948,1201.1171,348.5796,100.738165,100.738165,490.07014,857.91333,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,6194.435872,1.53434827657623
-c50,352.4661,1720.485,426.5549,1201.1171,51.115816,925.6073,618.5072,290.68457,102.069715,102.069715,208.37216,346.99177,26.069312,1.204529,1.669613,6374.9848,1.49089328164526
-c51,352.4661,1720.485,426.5549,1201.1171,558.563,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,346.99177,26.069312,7.640322,1.669613,6035.793342,1.57467651093949
-c4,4143.595773
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c30,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c40,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c50,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results1.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results1.csv
deleted file mode 100644
index a03d6b7ac070f7afab0e1cd4a25ba2bd3913415d..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results1.csv
+++ /dev/null
@@ -1,187 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33166.3812,1.76426503433463
-c2,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,157.113483,78.556742,3203.344,3427.0759,39.466907,39.466907,1286.6509,57.16078,10.00372,22961.566622,2.54835776458303
-c3,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c4,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c5,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33166.3812,1.76426503433463
-c6,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,157.113483,78.556742,3203.344,3427.0759,39.466907,39.466907,1286.6509,57.16078,10.00372,22961.566622,2.54835776458303
-c7,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c8,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c9,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33166.3812,1.76426503433463
-c10,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c11,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,157.113483,78.556742,3203.344,3427.0759,39.466907,39.466907,1286.6509,57.16078,10.00372,22961.566622,2.54835776458303
-c12,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,157.113483,1299.926,39.466907,1286.6509,57.16078,10.00372,28662.58499,2.041486720729
-c2,22961.566622
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6507.467735,1.46054078160495
-c2,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,82.574848,41.518080,527.54432,543.5573,21.220352,21.220352,210.33129,7.640322,1.669613,4916.500325,1.93316817581685
-c3,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c4,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c5,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6507.467735,1.46054078160495
-c6,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,82.574848,41.518080,527.54432,543.5573,21.220352,21.220352,210.33129,7.640322,1.669613,4916.500325,1.93316817581685
-c7,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c8,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c9,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6507.467735,1.46054078160495
-c10,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c11,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,82.574848,41.518080,527.54432,543.5573,21.220352,21.220352,210.33129,7.640322,1.669613,4916.500325,1.93316817581685
-c12,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,83.036160,208.37216,21.220352,210.33129,7.640322,1.669613,5861.618387,1.6214672072362
-c2,4916.500325
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33564.2792,1.74335001565191
-c2,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,352.195669,171.815177,3203.344,3427.0759,89.882488,89.882488,1286.6509,57.16078,10.00372,23943.718591,2.44382619083286
-c3,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c4,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c5,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33564.2792,1.74335001565191
-c6,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,352.195669,171.815177,3203.344,3427.0759,89.882488,89.882488,1286.6509,57.16078,10.00372,23943.718591,2.44382619083286
-c7,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c8,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c9,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,3427.0759,1299.926,1273.3007,1286.6509,57.16078,10.00372,33564.2792,1.74335001565191
-c10,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c11,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,352.195669,171.815177,3203.344,3427.0759,89.882488,89.882488,1286.6509,57.16078,10.00372,23943.718591,2.44382619083286
-c12,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,1683.0934,3203.344,357.330128,1299.926,89.882488,1286.6509,57.16078,10.00372,29311.115216,1.99631730871254
-c2,23943.718591
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,151.959867,151.959867,74.930729,0,0,38.454652,38.454652,0,0,0,455.759767,0
-c3,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c4,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,151.959867,151.959867,74.930729,0,0,38.454652,38.454652,0,0,0,455.759767,0
-c7,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c8,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c11,0,0,0,0,0,151.959867,151.959867,74.930729,0,0,38.454652,38.454652,0,0,0,455.759767,0
-c12,0,0,0,0,0,0,0,0,0,153.127903,0,38.454652,0,0,0,191.582555,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,43.122319,43.122319,18.327706,0,0,11.960929,11.960929,0,0,0,128.494202,0
-c3,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c4,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,43.122319,43.122319,18.327706,0,0,11.960929,11.960929,0,0,0,128.494202,0
-c7,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c8,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c11,0,0,0,0,0,43.122319,43.122319,18.327706,0,0,11.960929,11.960929,0,0,0,128.494202,0
-c12,0,0,0,0,0,0,0,0,0,47.088742,0,11.960929,0,0,0,59.049671,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,18.163317,18.163317,8.429972,0,0,4.848960,4.848960,0,0,0,54.454526,0
-c3,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c4,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,18.163317,18.163317,8.429972,0,0,4.848960,4.848960,0,0,0,54.454526,0
-c7,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c8,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c11,0,0,0,0,0,18.163317,18.163317,8.429972,0,0,4.848960,4.848960,0,0,0,54.454526,0
-c12,0,0,0,0,0,0,0,0,0,19.033555,0,4.848960,0,0,0,23.882515,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c2,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c3,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c4,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c5,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c6,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c7,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c8,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c9,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c10,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c11,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c12,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c2,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c3,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c4,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c5,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c6,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c7,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c8,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c9,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c10,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c11,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c12,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6617.102735,1.43634191503388
-c2,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,100.738165,49.948052,527.54432,543.5573,26.069312,26.069312,210.33129,7.640322,1.669613,5080.589851,1.8707319916911
-c3,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c4,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c5,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6617.102735,1.43634191503388
-c6,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,100.738165,49.948052,527.54432,543.5573,26.069312,26.069312,210.33129,7.640322,1.669613,5080.589851,1.8707319916911
-c7,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c8,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c9,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,543.5573,208.37216,206.54856,210.33129,7.640322,1.669613,6617.102735,1.43634191503388
-c10,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c11,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,100.738165,49.948052,527.54432,543.5573,26.069312,26.069312,210.33129,7.640322,1.669613,5080.589851,1.8707319916911
-c12,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,290.68457,527.54432,102.069715,208.37216,26.069312,210.33129,7.640322,1.669613,5995.135902,1.58535555404069
-c2,5080.589851
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results2.csv b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results2.csv
deleted file mode 100644
index ef1faa1dec83dc5120ac7f2c1810f0cad3edbb1b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_results2.csv
+++ /dev/null
@@ -1,187 +0,0 @@
-Compute Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,78.556742,3203.344,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,27044.698132,2.16361396755259
-c2,1108.984,6358.145,1949.97,157.113483,1731.6018,3053.2545,3366.9575,59.008124,118.016247,157.113483,1299.926,39.466907,29.645681,57.16078,10.00372,19496.367225,3.0012917723379
-c3,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,39.466907,39.466907,57.16078,10.00372,21473.029463,2.725013099401
-c4,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,22706.863256,2.57694274746839
-c5,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,78.556742,3203.344,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,27044.698132,2.16361396755259
-c6,1108.984,6358.145,1949.97,157.113483,1731.6018,3053.2545,3366.9575,59.008124,118.016247,157.113483,1299.926,39.466907,29.645681,57.16078,10.00372,19496.367225,3.0012917723379
-c7,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,39.466907,39.466907,57.16078,10.00372,21473.029463,2.725013099401
-c8,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,22706.863256,2.57694274746839
-c9,1108.984,6358.145,1949.97,3356.913,1731.6018,3053.2545,3366.9575,78.556742,3203.344,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,27044.698132,2.16361396755259
-c10,1108.984,6358.145,1949.97,157.113483,1731.6018,3053.2545,3366.9575,59.008124,118.016247,157.113483,1299.926,39.466907,29.645681,57.16078,10.00372,19496.367225,3.0012917723379
-c11,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,39.466907,39.466907,57.16078,10.00372,21473.029463,2.725013099401
-c12,1108.984,6358.145,1949.97,3356.913,1731.6018,157.113483,3366.9575,1683.0934,157.113483,157.113483,1299.926,1273.3007,39.466907,57.16078,10.00372,22706.863256,2.57694274746839
-c2,19496.367225
-
-Compute Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,41.518080,527.54432,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,5608.669167,1.69459486832672
-c2,352.4661,1610.85,426.5549,82.574848,333.7788,544.1687,590.3019,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4408.408447,2.15597580321087
-c3,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4766.405437,1.99404395707003
-c4,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,4951.733645,1.9194130071305
-c5,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,41.518080,527.54432,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,5608.669167,1.69459486832672
-c6,352.4661,1610.85,426.5549,82.574848,333.7788,544.1687,590.3019,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4408.408447,2.15597580321087
-c7,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4766.405437,1.99404395707003
-c8,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,4951.733645,1.9194130071305
-c9,352.4661,1610.85,426.5549,652.9992,333.7788,544.1687,590.3019,41.518080,527.54432,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,5608.669167,1.69459486832672
-c10,352.4661,1610.85,426.5549,82.574848,333.7788,544.1687,590.3019,41.518080,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4408.408447,2.15597580321087
-c11,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,21.220352,21.220352,7.640322,1.669613,4766.405437,1.99404395707003
-c12,352.4661,1610.85,426.5549,652.9992,333.7788,82.574848,590.3019,290.68457,83.036160,83.036160,208.37216,206.54856,21.220352,7.640322,1.669613,4951.733645,1.9194130071305
-c2,4408.408447
-
-Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,1108.984,7471.505,3792.343,6973.454,3475.891,6007.756,6530.41,3324.8076,6053.9348,6493.6098,2479.9481,2376.5679,2353.8847,69.37799,1.812953,58514.286843,0.999999998291016
-c1,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,171.815177,3203.344,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,27786.486793,2.10585408181777
-c2,1108.984,6756.043,1949.97,375.615141,1731.6018,3053.2545,3366.9575,152.266559,318.232892,357.330128,1299.926,89.882488,80.061262,57.16078,10.00372,20707.28977,2.82578199321841
-c3,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,89.882488,89.882488,57.16078,10.00372,22567.274101,2.59288234466558
-c4,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,23750.692313,2.46368761910167
-c5,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,171.815177,3203.344,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,27786.486793,2.10585408181777
-c6,1108.984,6756.043,1949.97,375.615141,1731.6018,3053.2545,3366.9575,152.266559,318.232892,357.330128,1299.926,89.882488,80.061262,57.16078,10.00372,20707.28977,2.82578199321841
-c7,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,89.882488,89.882488,57.16078,10.00372,22567.274101,2.59288234466558
-c8,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,23750.692313,2.46368761910167
-c9,1108.984,6756.043,1949.97,3356.913,1731.6018,3053.2545,3366.9575,171.815177,3203.344,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,27786.486793,2.10585408181777
-c10,1108.984,6756.043,1949.97,375.615141,1731.6018,3053.2545,3366.9575,152.266559,318.232892,357.330128,1299.926,89.882488,80.061262,57.16078,10.00372,20707.28977,2.82578199321841
-c11,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,89.882488,89.882488,57.16078,10.00372,22567.274101,2.59288234466558
-c12,1108.984,6756.043,1949.97,3356.913,1731.6018,352.195669,3366.9575,1683.0934,357.330128,357.330128,1299.926,1273.3007,89.882488,57.16078,10.00372,23750.692313,2.46368761910167
-c2,20707.28977
-
-Leakage Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,74.930729,0,153.127903,0,0,38.454652,0,0,266.513284,0
-c2,0,0,0,157.595858,0,0,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,0,0,615.691697,0
-c3,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0,535.124977,0
-c4,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,0,38.454652,0,0,496.670325,0
-c5,0,0,0,0,0,0,0,74.930729,0,153.127903,0,0,38.454652,0,0,266.513284,0
-c6,0,0,0,157.595858,0,0,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,0,0,615.691697,0
-c7,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0,535.124977,0
-c8,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,0,38.454652,0,0,496.670325,0
-c9,0,0,0,0,0,0,0,74.930729,0,153.127903,0,0,38.454652,0,0,266.513284,0
-c10,0,0,0,157.595858,0,0,0,74.930729,153.127903,153.127903,0,38.454652,38.454652,0,0,615.691697,0
-c11,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,38.454652,38.454652,0,0,535.124977,0
-c12,0,0,0,0,0,151.959867,0,0,153.127903,153.127903,0,0,38.454652,0,0,496.670325,0
-c0,0
-
-Memory Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,18.327706,0,47.088742,0,0,11.960929,0,0,77.377377,0
-c2,0,0,0,60.905800,0,0,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0,0,197.332848,0
-c3,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0,161.221661,0
-c4,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,0,11.960929,0,0,149.260732,0
-c5,0,0,0,0,0,0,0,18.327706,0,47.088742,0,0,11.960929,0,0,77.377377,0
-c6,0,0,0,60.905800,0,0,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0,0,197.332848,0
-c7,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0,161.221661,0
-c8,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,0,11.960929,0,0,149.260732,0
-c9,0,0,0,0,0,0,0,18.327706,0,47.088742,0,0,11.960929,0,0,77.377377,0
-c10,0,0,0,60.905800,0,0,0,18.327706,47.088742,47.088742,0,11.960929,11.960929,0,0,197.332848,0
-c11,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,11.960929,11.960929,0,0,161.221661,0
-c12,0,0,0,0,0,43.122319,0,0,47.088742,47.088742,0,0,11.960929,0,0,149.260732,0
-c0,0
-
-Memory Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,8.429972,0,19.033555,0,0,4.848960,0,0,32.312487,0
-c2,0,0,0,21.882908,0,0,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0,0,78.07791,0
-c3,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0,65.928347,0
-c4,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,0,4.848960,0,0,61.079387,0
-c5,0,0,0,0,0,0,0,8.429972,0,19.033555,0,0,4.848960,0,0,32.312487,0
-c6,0,0,0,21.882908,0,0,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0,0,78.07791,0
-c7,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0,65.928347,0
-c8,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,0,4.848960,0,0,61.079387,0
-c9,0,0,0,0,0,0,0,8.429972,0,19.033555,0,0,4.848960,0,0,32.312487,0
-c10,0,0,0,21.882908,0,0,0,8.429972,19.033555,19.033555,0,4.848960,4.848960,0,0,78.07791,0
-c11,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,4.848960,4.848960,0,0,65.928347,0
-c12,0,0,0,0,0,18.163317,0,0,19.033555,19.033555,0,0,4.848960,0,0,61.079387,0
-c0,0
-
-Patch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
-Quantization Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c2,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c3,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c4,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c5,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c6,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c7,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c8,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c9,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c10,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c11,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c12,0,397.898,0,0,0,0,0,0,0,0,0,0,0,0,0,397.898,0
-c0,0
-
-Quantization Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c2,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c3,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c4,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c5,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c6,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c7,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c8,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c9,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c10,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c11,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c12,0,109.635,0,0,0,0,0,0,0,0,0,0,0,0,0,109.635,0
-c0,0
-
-Time
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,352.4661,1550.099,701.7255,1159.5933,558.563,925.6073,971.2723,490.07014,857.91333,883.36663,344.11179,344.22248,351.94343,12.419968,1.04789,9504.422158,0.999999989478582
-c1,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,49.948052,527.54432,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5750.616654,1.65276570576346
-c2,352.4661,1720.485,426.5549,104.457756,333.7788,544.1687,590.3019,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4596.121357,2.06792232253231
-c3,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4941.968784,1.92320558487758
-c4,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5122.448032,1.8554452701289
-c5,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,49.948052,527.54432,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5750.616654,1.65276570576346
-c6,352.4661,1720.485,426.5549,104.457756,333.7788,544.1687,590.3019,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4596.121357,2.06792232253231
-c7,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4941.968784,1.92320558487758
-c8,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5122.448032,1.8554452701289
-c9,352.4661,1720.485,426.5549,652.9992,333.7788,544.1687,590.3019,49.948052,527.54432,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5750.616654,1.65276570576346
-c10,352.4661,1720.485,426.5549,104.457756,333.7788,544.1687,590.3019,49.948052,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4596.121357,2.06792232253231
-c11,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,26.069312,26.069312,7.640322,1.669613,4941.968784,1.92320558487758
-c12,352.4661,1720.485,426.5549,652.9992,333.7788,100.738165,590.3019,290.68457,102.069715,102.069715,208.37216,206.54856,26.069312,7.640322,1.669613,5122.448032,1.8554452701289
-c2,4596.121357
-
-Unpatch Energy
-Configuration,Conv1,Conv2,Conv3,Conv4,Conv5,Conv6,Conv7,Conv8,Conv9,Conv10,Conv11,Conv12,Conv13,FC1,FC2,Total,Improvement
-c0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-c0,0
-
diff --git a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_tensors.txt b/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_tensors.txt
deleted file mode 100644
index 6c6b42b93c0446c298489429261592fe99e2f81b..0000000000000000000000000000000000000000
--- a/llvm/projects/soc_simulator/vgg16_cifar100/vgg16_tensors.txt
+++ /dev/null
@@ -1,64 +0,0 @@
-#Conv1,3
-Conv1,163.415,464.69,310.228,694.841,7.62006,16.0868,83.4767,247.452
-Add1,113.913,376.561,69.7114,211.862,314.261,894.668,83.2799,265.625
-Relu1,75.1381,267.733,63.043,222.775,116.511,379.888,83.4086,300.518
-#Conv2,4
-Conv2,1186.37,5295.79,1399.51,5412.29,109.635,397.898,83.3307,436.945
-Add2,114.041,707.602,69.1905,303.338,336.556,1525.29,83.1178,365.325
-Relu2,74.295,455.493,62.4233,282.215,106.67,463.637,83.0734,372.529
-Pool1,175.393,1012.62,79.7262,360.302,106.496,467.712,20.9488,92.2916
-#Conv3,3
-Conv3,613.82,3267.16,354.47,1585.56,28.5399,124.01,41.6322,203.92
-Add3,51.2818,306.368,40.9358,206.435,52.9835,255.846,41.6412,204.32
-Relu3,36.6237,218.815,31.1491,157.975,53.9219,262.044,41.4745,201.753
-#Conv4,4
-Conv4,982.846,5808.36,543.421,2780.23,52.8973,253.818,41.5238,236.022
-Add4,51.329,344.524,37.6205,200.601,146.069,779.141,41.3499,213.133
-Relu4,36.7139,244.573,31.1126,165.225,52.5946,268.222,41.3761,210.186
-Pool2,88.7044,575.997,40.8451,210.857,52.6109,263.231,10.5892,50.2118
-#Conv5,3
-Conv5,491.91,3047.42,278.988,1427.88,14.8008,70.9043,20.8418,110.25
-Add5,48.2779,312.538,39.1711,216.321,27.4739,144.775,20.8433,109.029
-Relu5,18.3751,115.933,15.6197,87.4008,27.1876,142.132,20.7564,107.244
-#Conv6,3
-Conv6,858.952,5554.16,490.314,2738.69,28.103,144.6,20.8654,122.714
-Add6,48.3038,331.31,38.2773,222.684,78.3804,453.985,20.7551,113.98
-Relu6,18.3515,122.286,15.5774,91.8805,26.9604,148.182,20.7925,111.946
-#Conv7,4
-Conv7,859.425,5760.38,514.632,2922.64,28.2053,152.501,20.8436,124.457
-Add7,48.1751,337.048,38.3104,225.263,81.46,477.128,20.7616,115.225
-Relu7,18.3889,125.084,15.5863,93.2475,27.0202,149.73,20.7296,113.484
-Pool3,45.2833,307.898,21.7732,125.807,27.105,148.004,5.37692,25.667
-#Conv8,3
-Conv8,434.046,2940.43,240.716,1378.49,9.68469,49.5859,10.5597,59.0058
-Add8,46.6813,323.76,42.0846,253.744,14.6636,82.9186,10.5141,57.5583
-Relu8,9.34284,60.6176,7.88397,50.8594,14.4739,80.2487,10.4677,57.0219
-#Conv9,3
-Conv9,801.893,5651.89,477.878,2885.71,16.2347,90.1067,10.5851,63.9065
-Add9,46.6775,338.552,41.8259,265.047,39.8499,247.783,10.4589,60.1816
-Relu9,9.34283,63.4928,7.84042,52.587,14.2726,83.3662,10.4563,59.3499
-#Conv10,4
-Conv10,802.661,5895.34,481.652,3018.22,16.267,94.4725,10.5438,65.7062
-Add10,46.7852,353.064,41.8232,272.933,40.0563,257.396,10.5152,61.7303
-Relu10,9.31493,65.6928,7.8553,54.9037,14.2674,85.1708,10.4698,61.6665
-Pool4,24.6055,179.513,12.2268,81.0192,14.3183,85.3146,2.76689,12.3171
-#Conv11,3
-Conv11,308.859,2238.17,173.774,1077.96,6.79928,37.3658,2.77964,12.8278
-Add11,32.7593,228.974,32.499,205.175,4.84751,25.3885,2.71712,12.3719
-Relu11,2.49349,12.8041,2.09916,16.791,4.48398,23.4935,2.65339,11.6345
-#Conv12,3
-Conv12,308.957,2143.29,172.103,1054.21,6.35668,34.3,2.76929,12.2454
-Add12,32.7648,220.92,32.3458,201.907,4.88662,25.364,2.69777,11.7633
-Relu12,2.50068,12.3579,2.09976,17.1837,4.44727,22.6715,2.64916,11.3108
-#Conv13,4
-Conv13,308.526,2076.4,171.684,1041.08,6.20782,33.1375,2.76826,12.0417
-Add13,32.723,215.055,32.2976,199.983,4.91853,25.4026,2.71496,12.0093
-Relu13,2.48762,12.2284,2.10382,16.7344,4.36041,22.0096,2.72796,11.4339
-Pool5,8.20681,50.2013,4.24587,28.8535,4.30952,21.6535,0.789328,1.12606
-#FC1,3
-Mul1,10.6715,65.8927,5.49296,37.0659,2.18449,8.22985,0.810781,1.37103
-Add14,0.902452,1.91287,0.999932,9.47008,1.88605,7.19323,0.760782,0.960605
-Relu14,0.846016,1.57242,1.14743,10.6248,1.5964,5.26066,0.762322,0.9319
-#FC2,2
-Mul2,0.821147,1.66598,1.31517,8.1931,1.76803,6.49192,0.133188,0.0183427
-Add15,0.226743,0.146973,0.354443,1.81062,0.764484,1.36953,0.120211,0.0438951
diff --git a/llvm/projects/visc-cpu-rt/CMakeLists.txt b/llvm/projects/visc-cpu-rt/CMakeLists.txt
deleted file mode 100644
index 4887abbaf9be21d0fd69811789c0a40a52a23024..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/CMakeLists.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-add_custom_target(visc-cpu-rt ALL)
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/deviceStatusSwitchIntervals.txt
-    ${CMAKE_CURRENT_BINARY_DIR}/deviceStatusSwitchIntervals.txt
-  DEPENDS deviceStatusSwitchIntervals.txt
-  COMMENT "Copying deviceStatusSwitchIntervals.txt")
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/device_abstraction.h
-    ${CMAKE_CURRENT_BINARY_DIR}/device_abstraction.h
-  DEPENDS device_abstraction.h
-  COMMENT "Copying device_abstraction.h")
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/policy.h
-    ${CMAKE_CURRENT_BINARY_DIR}/policy.h
-  DEPENDS policy.h
-  COMMENT "Copying policy.h")
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/visc-rt.h
-    ${CMAKE_CURRENT_BINARY_DIR}/visc-rt.h
-  DEPENDS visc-rt.h
-  COMMENT "Copying visc-rt.h")
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/visc-rt.cpp
-    ${CMAKE_CURRENT_BINARY_DIR}/visc-rt.cpp
-  DEPENDS visc-rt.cpp
-  COMMENT "Copying visc-rt.cpp")
-add_custom_command(
-  TARGET visc-cpu-rt PRE_BUILD
-  COMMAND ${CMAKE_COMMAND} -E copy
-    ${CMAKE_CURRENT_SOURCE_DIR}/makefile
-    ${CMAKE_CURRENT_BINARY_DIR}/makefile
-  DEPENDS makefile
-  COMMENT "Copying makefile")
diff --git a/llvm/projects/visc-cpu-rt/deviceStatusSwitchIntervals.txt b/llvm/projects/visc-cpu-rt/deviceStatusSwitchIntervals.txt
deleted file mode 100644
index 7069470a1a6f8b1a49eea2824f27204ebdf3fb26..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/deviceStatusSwitchIntervals.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-10
-10 15 10 16 15 30 15 25 20 15
diff --git a/llvm/projects/visc-cpu-rt/device_abstraction.h b/llvm/projects/visc-cpu-rt/device_abstraction.h
deleted file mode 100644
index 68748c7ab73d316c7bf296e67d88c0114b4cac81..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/device_abstraction.h
+++ /dev/null
@@ -1,82 +0,0 @@
-#ifndef __DEVICE_ABSTRACTION__
-#define __DEVICE_ABSTRACTION__
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <time.h>
-#include <thread>
-#include <vector>
-#include <iostream>
-#include <fstream>
-
-#define MIN_INTERVAL 2
-#define MAX_INTERVAL 8
-#define NUM_INTERVALS 10
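-// Interval lengths are in seconds (they feed std::chrono::seconds in
-// updateDeviceStatus); a randomly generated run uses 1 to NUM_INTERVALS
-// intervals of MIN_INTERVAL to MAX_INTERVAL-1 seconds each.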
-
-// Device status variable: true if the device is available for use
-volatile bool deviceStatus = true;
-// Intervals at which to change the device status
-std::vector<unsigned> Intervals;
-
-// Set to true when program execution ends, so that the device status
-// simulation thread can terminate
-volatile bool executionEnd = false;
-
-
-void initializeDeviceStatusIntervals() {
-
-  unsigned sz = 0;
-  unsigned tmp = 0;
-
-  const char *fn =
-    "/home/kotsifa2/HPVM/hpvm/build/projects/visc-rt/deviceStatusSwitchIntervals.txt";
-  std::ifstream infile;
-  infile.open(fn);
-  if (!infile.is_open()) {
-    std::cout << "Failed to open " << fn << " for reading\n";
-    return;
-  }
-  infile >> sz;
-
-  if (sz) {
-    // We have data. Read them into the vector
-    for (unsigned i = 0; i < sz; i++) {
-      infile >> tmp;
-      Intervals.push_back(tmp);
-    }
-    infile.close();
-  } else {
-    // We have no data. Create random data and write them into the file
-    infile.close();
-    std::ofstream outfile;
-    outfile.open(fn);
-    if (!outfile.is_open()) {
-      std::cout << "Failed to open " << fn << " for writing\n";
-      return;
-    }
-    sz = 1 + rand()%NUM_INTERVALS;
-    outfile << sz << "\n";
-    for (unsigned i = 0; i < sz; i++) {
-      Intervals.push_back(MIN_INTERVAL + rand()%(MAX_INTERVAL - MIN_INTERVAL));
-      outfile << Intervals[i] << " "; // space-separate so infile >> can re-read the values
-    }
-    outfile.close();
-  }
-
-  return;
-}
-
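-// Simulation thread body: sleeps for each interval in turn, toggling
-// deviceStatus, until executionEnd is set.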
-void updateDeviceStatus() {
-
-  unsigned i = 0;
-  while (!executionEnd) {
-    std::this_thread::sleep_for (std::chrono::seconds(Intervals[i]));
-    deviceStatus = !deviceStatus;
-    std::cout << "Changed device status to " << deviceStatus << "\n";
-    i = (i+1) % Intervals.size();
-  }
-
-}
-
-#endif // __DEVICE_ABSTRACTION__
diff --git a/llvm/projects/visc-cpu-rt/makefile b/llvm/projects/visc-cpu-rt/makefile
deleted file mode 100644
index a1fd6e1e01721d3be43af7a3aceed1b7a6a50b97..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/makefile
+++ /dev/null
@@ -1,31 +0,0 @@
-LLVM_SRC_ROOT = ../../../llvm
-LLVM_BUILD_ROOT = ../..
-
-OPENCL_INC_PATH = /opt/intel/opencl-sdk/include
-
-ifeq ($(NUM_CORES),)
-  NUM_CORES=8
-endif
-
-CPP_FLAGS = -I $(LLVM_SRC_ROOT)/include -I $(LLVM_BUILD_ROOT)/include -I $(OPENCL_INC_PATH) -std=c++11 -D__STDC_CONSTANT_MACROS -D__STDC_LIMIT_MACROS
-TARGET:=visc-rt
-
-#LLVM_CC:=$(LLVM_BUILD_ROOT)/bin/clang
-#LLVM_CXX:=$(LLVM_BUILD_ROOT)/bin/clang++
-
-LLVM_CC=clang-4.0
-LLVM_CXX=clang++-4.0
-
-OPTS =
-
-ifeq ($(DEBUG),1)
-  OPTS+=-DDEBUG_BUILD
-endif
-
-all: $(TARGET:%=%.ll)
-
-$(TARGET:%=%.ll):%.ll:%.cpp %.h
-	$(LLVM_CXX) -DNUM_CORES=$(NUM_CORES) -O3 -S -emit-llvm $(CPP_FLAGS) $(OPTS) $< -o $@
-
-clean :
-	rm -f $(TARGET).ll
diff --git a/llvm/projects/visc-cpu-rt/policy.h b/llvm/projects/visc-cpu-rt/policy.h
deleted file mode 100644
index 436ad39295d53b9d28680ff0f436ebb41a7812da..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/policy.h
+++ /dev/null
@@ -1,103 +0,0 @@
-#ifndef __POLICY__
-#define __POLICY__
-
-#include <string>
-#include "device_abstraction.h"
-
- /************************* Policies *************************************/
-class Policy {
-  public:
-    virtual int getVersion(const char *, int64_t) = 0;
-    virtual ~Policy() {};
-};
-
-class NodePolicy : public Policy {
-  virtual int getVersion(const char *name, int64_t it) override {
-    std::string s(name);
-    //std::string NodeNames[1] = { "_Z9mysgemmNTPfiS_iS_iiff_clonedInternal_level2_cloned" };
-    std::string NodeNames[] = {
-      "WrapperGaussianSmoothing_cloned",
-      "WrapperlaplacianEstimate_cloned",
-      "WrapperComputeZeroCrossings_cloned",
-      "WrapperComputeGradient_cloned",
-      "WrapperComputeMaxGradient_cloned",
-      "WrapperRejectZeroCrossings_cloned",
-    };
-    //if (!s.compare(NodeNames[4])) {
-    //  std::cout << s << ": CPU" << "\n";
-    //  return 0;
-    //}
-    return 2;
-  }
-};
-
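-// Alternates devices by iteration: iterations 0 and 1 of every window of 10
-// run version 0 (CPU); the remaining eight run version 2 (GPU).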
-class IterationPolicy : public Policy {
-  virtual int getVersion(const char *name, int64_t it) override {
-    if ((it % 10 == 0) || (it % 10 == 1))
-      return 0;
-    else
-      return 2;
-  }
-};
-
-class DeviceStatusPolicy : public Policy {
-  virtual int getVersion(const char *name, int64_t it) override {
-    if (deviceStatus) {
-      //std::cout << "Returning GPU\n";
-      return 2;
-    }
-    else {
-      //std::cout << "Returning CPU\n";
-      return 0;
-    }
-  }
-};
-
-/* ------------------------------------------------------------------------- */
-// Added for the CFAR interactive policy demo.
-
-class InteractivePolicy : public Policy {
-private:
-  // 0 for CPU, 1 for GPU, 2 for Vector
-  unsigned int userTargetDeviceChoice;
-  // Used to end thread execution
-  bool end;
-  // Thread that will update userTargetDeviceChoice
-
-  //std::thread userTargetDeviceChoiceThread;
-  
-  // Thread function
-  void updateUserTargetChoice() {
-    while (!end) {
-      std::cout << "Select target device (0 for CPU, 1 fpr GPU): ";
-
-      std::cin >> userTargetDeviceChoice;
-      
-      if (userTargetDeviceChoice > 1) {
-        std::cout << "Invalid target device. Selecting GPU instead.\n";
-        userTargetDeviceChoice = 1;
-      }
-      
-    }
-  }
-
-public:
-  // Inherited method, required for every policy object
-  virtual int getVersion(const char *name, int64_t it) {
-    return userTargetDeviceChoice;
-  }
-
-  InteractivePolicy() {
-    userTargetDeviceChoice = 1;
-    end = false;
-    //userTargetDeviceChoiceThread =
-    //  std::thread(&InteractivePolicy::updateUserTargetChoice, this);
-  }
-
-  ~InteractivePolicy() {
-    end = true;
-    //userTargetDeviceChoiceThread.join(); 
-  }
-};
-
-#endif // __POLICY__
diff --git a/llvm/projects/visc-cpu-rt/visc-rt.cpp b/llvm/projects/visc-cpu-rt/visc-rt.cpp
deleted file mode 100644
index b4e9405efb2ee5660124b12e4ca8c07d61eff8f3..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/visc-rt.cpp
+++ /dev/null
@@ -1,1648 +0,0 @@
-
-#include <iostream>
-#include <string>
-#include <pthread.h>
-#include <cstdlib>
-#include <cstdio>
-#include <cstring>
-#include <cassert>
-#include <map>
-#include <CL/cl.h>
-
-#include <unistd.h>
-
-#if _POSIX_VERSION >= 200112L
-# include <sys/time.h>
-#endif
-#include "visc-rt.h"
-
-#ifndef DEBUG_BUILD
-#define DEBUG(s) {}
-#else
-#define DEBUG(s) s
-#endif
-
-#define BILLION   1000000000LL
-
-using namespace std;
-
-typedef struct {
-  pthread_t threadID;
-  std::vector<pthread_t>* threads;
-  // Map from InputPort to Size 
-  std::map<unsigned, uint64_t>* ArgInPortSizeMap;
-  //std::vector<uint64_t>* BindInSizes;
-  std::vector<unsigned>* BindInSourcePort;
-  std::vector<uint64_t>* BindOutSizes;
-  std::vector<uint64_t>* EdgeSizes;
-  std::vector<CircularBuffer<uint64_t>*>* BindInputBuffers;
-  std::vector<CircularBuffer<uint64_t>*>* BindOutputBuffers;
-  std::vector<CircularBuffer<uint64_t>*>* EdgeBuffers;
-  std::vector<CircularBuffer<uint64_t>*>* isLastInputBuffers;
-} DFNodeContext_X86;
-
-typedef struct {
-  cl_context clOCLContext;
-  cl_command_queue clCommandQue;
-  cl_program clProgram;
-  cl_kernel clKernel;
-} DFNodeContext_OCL;
-
-cl_context globalOCLContext;
-cl_device_id* clDevices;
-cl_command_queue globalCommandQue;
-
-Policy *policy = NULL;
-MemTracker MTracker;
-vector<DFGDepth> DStack;
-// Mutex to prevent concurrent access by multiple threads in pipeline
-pthread_mutex_t ocl_mtx;
-
-#define NUM_TESTS 1
-visc_TimerSet kernel_timer;
-
-static inline void checkErr(cl_int err, cl_int success, const char * name) {
-  if (err != success) {
-  cout << "ERROR: " << name << flush << "\n";
-  cout << "ErrorCode: " << err << flush << "\n";
-    exit(EXIT_FAILURE);
-  }
-}
-
-/************************* Policies *************************************/
-void llvm_visc_policy_init() {
-  cout << "Initializing policy object ...\n";
-//  policy = new NodePolicy();
-//  policy = new IterationPolicy();
-//  policy = new DeviceStatusPolicy();
-  policy = new InteractivePolicy();
-  cout << "DONE: Initializing policy object.\n";
-}
-
-void llvm_visc_policy_clear() {
-  if (policy) delete policy; // allocated with new; delete runs the policy's destructor
-}
-
-int llvm_visc_policy_getVersion(const char *name, int64_t i) {
-  return policy->getVersion(name, i);
-}
-
-/******************** Device Abstraction ********************************/
-//std::thread deviceStatusThread;
-/*
-void llvm_visc_deviceAbstraction_start() {
-  cout << "Starting device status simulation ...\n";
-  // Initialize vector with the points at which to switch device status
-  initializeDeviceStatusIntervals();
-  // Create a thread that performs the changes
-  deviceStatusThread = std::thread(updateDeviceStatus);
-  cout << "Started device status simulation thread ...\n";
-  return;
-}
-*/
-
-void llvm_visc_deviceAbstraction_end() {
-  cout << "Ending device status simulation thread ...\n";
-  // Set the variable that allows the thread to know that execution has ended
-  executionEnd = true;
-  // Wait for the thread that manages device status to terminate
-  //deviceStatusThread.join();
-  cout << "Ended device status simulation.\n";
-  return;
-}
-
-void llvm_visc_deviceAbstraction_waitOnDeviceStatus() {
-  while (!deviceStatus) { };
-  return;
-}
-
-/************************* Depth Stack Routines ***************************/
-
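-// NOTE: The depth-stack bookkeeping below is currently disabled; push/pop are
-// no-ops and the limit/instance queries return 0.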
-void llvm_visc_x86_dstack_push(unsigned n, uint64_t limitX, uint64_t iX, uint64_t limitY,
-    uint64_t iY, uint64_t limitZ, uint64_t iZ) {
-  //DEBUG(cout << "Pushing node information on stack:\n");
-  //DEBUG(cout << "\tNumDim = " << n << "\t Limit(" << limitX << ", " << limitY << ", "<< limitZ <<")\n");
-  //DEBUG(cout << "\tInstance(" << iX << ", " << iY << ", "<< iZ <<")\n");
-  //DFGDepth nodeInfo (n, limitX, iX, limitY, iY, limitZ, iZ);
-  //DStack.push_back(nodeInfo);
-  //DEBUG(cout << "DStack size = " << DStack.size() << flush << "\n");
-}
-
-void llvm_visc_x86_dstack_pop() {
-  //DEBUG(cout << "Popping from depth stack\n");
-  //DStack.pop_back();
-  //DEBUG(cout << "DStack size = " << DStack.size() << flush << "\n");
-}
-
-uint64_t llvm_visc_x86_getDimLimit(unsigned level, unsigned dim) {
-  //DEBUG(cout << "Request limit for dim " << dim << " of ancestor " << level <<flush << "\n");
-  //unsigned size = DStack.size();
-  //DEBUG(cout << "\t Return: " << DStack[size-level-1].getDimLimit(dim) <<flush << "\n");
-  //return DStack[size-level-1].getDimLimit(dim);
-  return 0;
-}
-
-uint64_t llvm_visc_x86_getDimInstance(unsigned level, unsigned dim) {
-  //DEBUG(cout << "Request instance id for dim " << dim << " of ancestor " << level <<flush << "\n");
-  //unsigned size = DStack.size();
-  //DEBUG(cout << "\t Return: " << DStack[size-level-1].getDimInstance(dim) <<flush << "\n");
-  //return DStack[size-level-1].getDimInstance(dim);
-  return 0;
-}
-
-/********************** Memory Tracking Routines **************************/
-
-void llvm_visc_track_mem(void* ptr, size_t size) {
-  DEBUG(cout << "Start tracking memory: " << ptr << flush << "\n");
-  MemTrackerEntry* MTE = MTracker.lookup(ptr);
-  if(MTE != NULL) {
-    DEBUG(cout << "ID " << ptr << " already present in the MemTracker Table\n");
-    return;
-  }
-  DEBUG(cout << "Inserting ID " << ptr << " in the MemTracker Table\n");
-  MTracker.insert(ptr, size, MemTrackerEntry::HOST, ptr);
-  DEBUG(MTracker.print());
-}
-
-void llvm_visc_untrack_mem(void* ptr) {
-  DEBUG(cout << "Stop tracking memory: " << ptr << flush << "\n");
-  MemTrackerEntry* MTE = MTracker.lookup(ptr);
-  if(MTE == NULL) {
-    cout << "WARNING: Trying to remove ID " << ptr << " not present in the MemTracker Table\n";
-    return;
-  }
-  DEBUG(cout << "Removing ID " << ptr << " from MemTracker Table\n");
-  if(MTE->getLocation() == MemTrackerEntry::DEVICE)
-    clReleaseMemObject((cl_mem) MTE->getAddress());
-  MTracker.remove(ptr);
-  DEBUG(MTracker.print());
-}
-
-
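-// Makes the tracked buffer identified by ptr resident on the given OpenCL
-// context: returns the existing cl_mem if it is already there, otherwise
-// allocates a device buffer (copying host data for inputs) and updates the
-// memory tracker to point at the device copy.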
-static void* llvm_visc_ocl_request_mem(void* ptr, size_t size, DFNodeContext_OCL* Context, bool isInput, bool isOutput) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "[OCL] Request memory: " << ptr << " for context: " << Context->clOCLContext << flush << "\n");
-  MemTrackerEntry* MTE = MTracker.lookup(ptr);
-  if (MTE == NULL) {
-    MTracker.print();
-    cout << "ERROR: Requesting memory not present in Table\n";
-    exit(EXIT_FAILURE);
-  }
-  // If already on device
-  if (MTE->getLocation() == MemTrackerEntry::DEVICE &&
-      ((DFNodeContext_OCL*)MTE->getContext())->clOCLContext == Context->clOCLContext) {
-    DEBUG(cout << "\tMemory found on device at: " << MTE->getAddress() << flush << "\n"); 
-    pthread_mutex_unlock(&ocl_mtx);
-    return MTE->getAddress();
-  }
-  
-  DEBUG(cout << "\tMemory found on host at: " << MTE->getAddress() << flush << "\n");
-  DEBUG(cout << "\t"; MTE->print(); cout << flush << "\n");
-  // Else copy and update the latest copy
-  cl_mem_flags clFlags;
-  cl_int errcode;
-
-  if(isInput && isOutput) clFlags = CL_MEM_READ_WRITE;
-  else if(isInput)        clFlags = CL_MEM_READ_ONLY;
-  else if(isOutput)       clFlags = CL_MEM_WRITE_ONLY;
-  else                    clFlags = CL_MEM_READ_ONLY;
-
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_COPY);
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_mem d_input = clCreateBuffer(Context->clOCLContext, clFlags, size, NULL, &errcode);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to allocate memory on device");
-  DEBUG(cout<< "\nMemory allocated on device: " << d_input << flush << "\n");
-  if(isInput) {
-    DEBUG(cout << "\tCopying ...");
-    //pthread_mutex_lock(&ocl_mtx);
-    errcode = clEnqueueWriteBuffer(Context->clCommandQue,
-                                  d_input,
-                                  CL_TRUE,
-                                  0,
-                                  size,MTE->getAddress(),
-                                  0,NULL,NULL);
-    //pthread_mutex_unlock(&ocl_mtx);
-    checkErr(errcode, CL_SUCCESS, "Failure to copy memory to device");
-  }
-
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_NONE);
-  DEBUG(cout << " done\n");
-  MTE->update(MemTrackerEntry::DEVICE, (void*) d_input, Context);
-  DEBUG(cout << "Updated Table\n");
-  DEBUG(MTracker.print());
-  pthread_mutex_unlock(&ocl_mtx);
-  return d_input;
-}
-
-void* llvm_visc_x86_argument_ptr(void* ptr, size_t size) {
-  return llvm_visc_request_mem(ptr, size);
-}
-
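-// Migrates a tracked buffer back to the host: zero-sized requests and
-// host-resident entries return immediately; otherwise the data is read back
-// from the device, the cl_mem is released, and the tracker is updated.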
-void* llvm_visc_request_mem(void* ptr, size_t size) {
-  // Ignore objects whose size is 0 - no memory is requested.
-  if (size == 0) {
-    DEBUG(cout << "[X86] Request memory (ignored): " << ptr << flush << "\n");
-    return ptr;
-  }
-
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "[X86] Request memory: " << ptr << flush << "\n");
-  MemTrackerEntry* MTE = MTracker.lookup(ptr);
-  if(MTE == NULL) {
-    cout << "ERROR: Requesting memory not present in Table\n";
-    pthread_mutex_unlock(&ocl_mtx);
-    exit(EXIT_FAILURE);
-  }
-  // If already on host
-  if(MTE->getLocation() == MemTrackerEntry::HOST) {
-    DEBUG(cout << "\tMemory found on host at: " << MTE->getAddress() << flush << "\n"); 
-    pthread_mutex_unlock(&ocl_mtx);
-    return MTE->getAddress();
-  }
-
-  // Else copy from device and update table
-  DEBUG(cout << "\tMemory found on device at: " << MTE->getAddress() << flush << "\n");
-  DEBUG(cout << "\tCopying ...");
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_COPY);
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_int errcode = clEnqueueReadBuffer(((DFNodeContext_OCL*)MTE->getContext())->clCommandQue,
-                                      (cl_mem) MTE->getAddress(),
-                                      CL_TRUE,
-                                      0,
-                                      size,
-                                      ptr,
-                                      0, NULL, NULL);
-  //pthread_mutex_unlock(&ocl_mtx);
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_NONE);
-  DEBUG(cout << " done\n");
-  checkErr(errcode, CL_SUCCESS, "[request mem] Failure to read output");
-  DEBUG(cout << "Free mem object on device\n");
-  clReleaseMemObject((cl_mem) MTE->getAddress());
-  DEBUG(cout << "Updated Table\n");
-  MTE->update(MemTrackerEntry::HOST, ptr);
-  DEBUG(MTracker.print());
-  pthread_mutex_unlock(&ocl_mtx);
-  return ptr;
-}
-
-/*************************** Timer Routines **********************************/
-
-static int is_async(enum visc_TimerID timer)
-{
-  return (timer == visc_TimerID_KERNEL) ||
-             (timer == visc_TimerID_COPY_ASYNC);
-}
-
-static int is_blocking(enum visc_TimerID timer)
-{
-  return (timer == visc_TimerID_COPY) || (timer == visc_TimerID_NONE);
-}
-
-#define INVALID_TIMERID visc_TimerID_LAST
-
-static int asyncs_outstanding(struct visc_TimerSet* timers)
-{
-  return (timers->async_markers != NULL) &&
-           (timers->async_markers->timerID != INVALID_TIMERID);
-}
-
-static struct visc_async_time_marker_list *
-get_last_async(struct visc_TimerSet* timers)
-{
-  /* Find the last event recorded thus far */
-  struct visc_async_time_marker_list * last_event = timers->async_markers;
-  if(last_event != NULL && last_event->timerID != INVALID_TIMERID) {
-    while(last_event->next != NULL &&
-            last_event->next->timerID != INVALID_TIMERID)
-      last_event = last_event->next;
-    return last_event;
-  } else
-    return NULL;
-}
-
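-// Appends an async marker for the given timer, reusing the first retired slot
-// in the marker list if one exists, and enqueues an OpenCL marker event so the
-// elapsed device time between markers can be read back via event profiling.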
-static void insert_marker(struct visc_TimerSet* tset, enum visc_TimerID timer)
-{
-  cl_int ciErrNum = CL_SUCCESS;
-  struct visc_async_time_marker_list ** new_event = &(tset->async_markers);
-
-  while(*new_event != NULL && (*new_event)->timerID != INVALID_TIMERID) {
-    new_event = &((*new_event)->next);
-  }
-
-  if(*new_event == NULL) {
-    *new_event = (struct visc_async_time_marker_list *)
-      			malloc(sizeof(struct visc_async_time_marker_list));
-    (*new_event)->marker = calloc(1, sizeof(cl_event));
-    /*
-    // I don't think this is needed at all. I believe clEnqueueMarker 'creates' the event
-#if ( __OPENCL_VERSION__ >= CL_VERSION_1_1 )
-fprintf(stderr, "Creating Marker [%d]\n", timer);
-    *((cl_event *)((*new_event)->marker)) = clCreateUserEvent(*clContextPtr, &ciErrNum);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Creating User Event Object!\n");
-    }
-    ciErrNum = clSetUserEventStatus(*((cl_event *)((*new_event)->marker)), CL_QUEUED);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Setting User Event Status!\n");
-    }
-#endif
-*/
-    (*new_event)->next = NULL;
-  }
-
-  /* valid event handle now acquired: insert the event record */
-  (*new_event)->label = NULL;
-  (*new_event)->timerID = timer;
-  //pthread_mutex_lock(&ocl_mtx);
-  ciErrNum = clEnqueueMarker(globalCommandQue, (cl_event *)(*new_event)->marker);
-  //pthread_mutex_unlock(&ocl_mtx);
-  if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Enqueueing Marker!\n");
-  }
-
-}
-
-static void insert_submarker(struct visc_TimerSet* tset, char *label, enum visc_TimerID timer)
-{
-  cl_int ciErrNum = CL_SUCCESS;
-  struct visc_async_time_marker_list ** new_event = &(tset->async_markers);
-
-  while(*new_event != NULL && (*new_event)->timerID != INVALID_TIMERID) {
-    new_event = &((*new_event)->next);
-  }
-
-  if(*new_event == NULL) {
-    *new_event = (struct visc_async_time_marker_list *)
-      			malloc(sizeof(struct visc_async_time_marker_list));
-    (*new_event)->marker = calloc(1, sizeof(cl_event));
-    /*
-#if ( __OPENCL_VERSION__ >= CL_VERSION_1_1 )
-fprintf(stderr, "Creating SubMarker %s[%d]\n", label, timer);
-    *((cl_event *)((*new_event)->marker)) = clCreateUserEvent(*clContextPtr, &ciErrNum);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Creating User Event Object!\n");
-    }
-    ciErrNum = clSetUserEventStatus(*((cl_event *)((*new_event)->marker)), CL_QUEUED);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Setting User Event Status!\n");
-    }
-#endif
-*/
-    (*new_event)->next = NULL;
-  }
-
-  /* valid event handle now acquired: insert the event record */
-  (*new_event)->label = label;
-  (*new_event)->timerID = timer;
-  //pthread_mutex_lock(&ocl_mtx);
-  ciErrNum = clEnqueueMarker(globalCommandQue, (cl_event *)(*new_event)->marker);
-  //pthread_mutex_unlock(&ocl_mtx);
-  if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Enqueueing Marker!\n");
-  }
-
-}
-
-
-/* Assumes that all recorded events have completed */
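-// Charges the profiled time between consecutive markers to the owning timer
-// (and any matching labeled subtimer), retires each consumed marker by
-// resetting its timerID, and returns the summed async time.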
-static visc_Timestamp record_async_times(struct visc_TimerSet* tset)
-{
-  struct visc_async_time_marker_list * next_interval = NULL;
-  struct visc_async_time_marker_list * last_marker = get_last_async(tset);
-  visc_Timestamp total_async_time = 0;
-
-  for(next_interval = tset->async_markers; next_interval != last_marker;
-      next_interval = next_interval->next) {
-    cl_ulong command_start=0, command_end=0;
-    cl_int ciErrNum = CL_SUCCESS;
-
-    ciErrNum = clGetEventProfilingInfo(*((cl_event *)next_interval->marker), CL_PROFILING_COMMAND_END, sizeof(cl_ulong), &command_start, NULL);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error getting first EventProfilingInfo: %d\n", ciErrNum);
-    }
-
-    ciErrNum = clGetEventProfilingInfo(*((cl_event *)next_interval->next->marker), CL_PROFILING_COMMAND_END, sizeof(cl_ulong), &command_end, NULL);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error getting second EventProfilingInfo: %d\n", ciErrNum);
-    }
-
-    visc_Timestamp interval = (visc_Timestamp) (((double)(command_end - command_start)));
-    tset->timers[next_interval->timerID].elapsed += interval;
-    if (next_interval->label != NULL) {
-      struct visc_SubTimer *subtimer = tset->sub_timer_list[next_interval->timerID]->subtimer_list;
-      while (subtimer != NULL) {
-        if ( strcmp(subtimer->label, next_interval->label) == 0) {
-          subtimer->timer.elapsed += interval;
-          break;
-        }
-        subtimer = subtimer->next;
-      }
-    }
-    total_async_time += interval;
-    next_interval->timerID = INVALID_TIMERID;
-  }
-
-  if(next_interval != NULL)
-    next_interval->timerID = INVALID_TIMERID;
-
-  return total_async_time;
-}
-
-static void
-accumulate_time(visc_Timestamp *accum,
-		visc_Timestamp start,
-		visc_Timestamp end)
-{
-#if _POSIX_VERSION >= 200112L
-  *accum += end - start;
-#else
-# error "Timestamps not implemented for this system"
-#endif
-}
-
-#if _POSIX_VERSION >= 200112L
-static visc_Timestamp get_time()
-{
-  struct timespec tv;
-  clock_gettime(CLOCK_MONOTONIC, &tv);
-  return (visc_Timestamp) (tv.tv_sec * BILLION + tv.tv_nsec);
-}
-#else
-# error "no supported time libraries are available on this platform"
-#endif
-
-void
-visc_ResetTimer(struct visc_Timer *timer)
-{
-  timer->state = visc_Timer_STOPPED;
-
-#if _POSIX_VERSION >= 200112L
-  timer->elapsed = 0;
-#else
-# error "visc_ResetTimer: not implemented for this system"
-#endif
-}
-
-void
-visc_StartTimer(struct visc_Timer *timer)
-{
-  if (timer->state != visc_Timer_STOPPED) {
-    // FIXME: Removing warning statement to avoid printing this error
-    // fputs("Ignoring attempt to start a running timer\n", stderr);
-    return;
-  }
-
-  timer->state = visc_Timer_RUNNING;
-
-#if _POSIX_VERSION >= 200112L
-  {
-    struct timespec tv;
-    clock_gettime(CLOCK_MONOTONIC, &tv);
-    timer->init = tv.tv_sec * BILLION + tv.tv_nsec;
-  }
-#else
-# error "visc_StartTimer: not implemented for this system"
-#endif
-}
-
-void
-visc_StartTimerAndSubTimer(struct visc_Timer *timer, struct visc_Timer *subtimer)
-{
-
-  unsigned int numNotStopped = 0x3; // 0b11: bit 1 = timer, bit 0 = subtimer
-  if (timer->state != visc_Timer_STOPPED) {
-    fputs("Warning: Timer was not stopped\n", stderr);
-    numNotStopped &= 0x1; // Zero out 2^1
-  }
-  if (subtimer->state != visc_Timer_STOPPED) {
-    fputs("Warning: Subtimer was not stopped\n", stderr);
-    numNotStopped &= 0x2; // Zero out 2^0
-  }
-  if (numNotStopped == 0x0) {
-    //fputs("Ignoring attempt to start running timer and subtimer\n", stderr);
-    return;
-  }
-
-  timer->state = visc_Timer_RUNNING;
-  subtimer->state = visc_Timer_RUNNING;
-
-#if _POSIX_VERSION >= 200112L
-  {
-    struct timespec tv;
-    clock_gettime(CLOCK_MONOTONIC, &tv);
-
-    if (numNotStopped & 0x2) {
-      timer->init = tv.tv_sec * BILLION + tv.tv_nsec;
-    }
-
-    if (numNotStopped & 0x1) {
-      subtimer->init = tv.tv_sec * BILLION + tv.tv_nsec;
-    }
-  }
-#else
-# error "visc_StartTimer: not implemented for this system"
-#endif
-
-}
-
-void
-visc_StopTimer(struct visc_Timer *timer)
-{
-  visc_Timestamp fini;
-
-  if (timer->state != visc_Timer_RUNNING) {
-    //fputs("Ignoring attempt to stop a stopped timer\n", stderr);
-    return;
-  }
-
-  timer->state = visc_Timer_STOPPED;
-
-#if _POSIX_VERSION >= 200112L
-  {
-    struct timespec tv;
-    clock_gettime(CLOCK_MONOTONIC, &tv);
-    fini = tv.tv_sec * BILLION + tv.tv_nsec;
-  }
-#else
-# error "visc_StopTimer: not implemented for this system"
-#endif
-
-  accumulate_time(&timer->elapsed, timer->init, fini);
-  timer->init = fini;
-}
-
-void visc_StopTimerAndSubTimer(struct visc_Timer *timer, struct visc_Timer *subtimer) {
-
-  visc_Timestamp fini;
-
-  unsigned int numNotRunning = 0x3; // 0b11: bit 1 = timer, bit 0 = subtimer
-  if (timer->state != visc_Timer_RUNNING) {
-    fputs("Warning: Timer was not running\n", stderr);
-    numNotRunning &= 0x1; // Zero out 2^1
-  }
-  if (subtimer->state != visc_Timer_RUNNING) {
-    fputs("Warning: Subtimer was not running\n", stderr);
-    numNotRunning &= 0x2; // Zero out 2^0
-  }
-  if (numNotRunning == 0x0) {
-    //fputs("Ignoring attempt to stop stopped timer and subtimer\n", stderr);
-    return;
-  }
-
-
-  timer->state = visc_Timer_STOPPED;
-  subtimer->state = visc_Timer_STOPPED;
-
-#if _POSIX_VERSION >= 200112L
-  {
-    struct timespec tv;
-    clock_gettime(CLOCK_MONOTONIC, &tv);
-    fini = tv.tv_sec * BILLION + tv.tv_nsec;
-  }
-#else
-# error "visc_StopTimer: not implemented for this system"
-#endif
-
-  if (numNotRunning & 0x2) {
-    accumulate_time(&timer->elapsed, timer->init, fini);
-    timer->init = fini;
-  }
-
-  if (numNotRunning & 0x1) {
-    accumulate_time(&subtimer->elapsed, subtimer->init, fini);
-    subtimer->init = fini;
-  }
-
-}
-
-/* Get the elapsed time in seconds. */
-double
-visc_GetElapsedTime(struct visc_Timer *timer)
-{
-  double ret;
-
-  if (timer->state != visc_Timer_STOPPED) {
-    fputs("Elapsed time from a running timer is inaccurate\n", stderr);
-  }
-
-#if _POSIX_VERSION >= 200112L
-  ret = timer->elapsed / 1e9;
-#else
-# error "visc_GetElapsedTime: not implemented for this system"
-#endif
-  return ret;
-}
-
-void
-visc_InitializeTimerSet(struct visc_TimerSet *timers)
-{
-  int n;
-
-  timers->wall_begin = get_time();
-  timers->current = visc_TimerID_NONE;
-
-  timers->async_markers = NULL;
-
-  for (n = 0; n < visc_TimerID_LAST; n++) {
-    visc_ResetTimer(&timers->timers[n]);
-    timers->sub_timer_list[n] = NULL;
-  }
-}
-
-
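-// Registers a labeled subtimer under the given category, allocating the
-// per-category subtimer list on first use and appending otherwise.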
-void
-visc_AddSubTimer(struct visc_TimerSet *timers, char *label, enum visc_TimerID visc_Category) {
-
-  struct visc_SubTimer *subtimer = (struct visc_SubTimer *) malloc
-    (sizeof(struct visc_SubTimer));
-
-  int len = strlen(label);
-
-  subtimer->label = (char *) malloc (sizeof(char)*(len+1));
-  sprintf(subtimer->label, "%s", label);
-
-  visc_ResetTimer(&subtimer->timer);
-  subtimer->next = NULL;
-
-  struct visc_SubTimerList *subtimerlist = timers->sub_timer_list[visc_Category];
-  if (subtimerlist == NULL) {
-    subtimerlist = (struct visc_SubTimerList *) calloc
-      (1, sizeof(struct visc_SubTimerList));
-    subtimerlist->subtimer_list = subtimer;
-    timers->sub_timer_list[visc_Category] = subtimerlist;
-  } else {
-    // Append to list
-    struct visc_SubTimer *element = subtimerlist->subtimer_list;
-    while (element->next != NULL) {
-      element = element->next;
-    }
-    element->next = subtimer;
-  }
-
-}
-
-void
-visc_SwitchToTimer(struct visc_TimerSet *timers, enum visc_TimerID timer)
-{
-  //cerr << "Switch to timer: " << timer << flush << "\n";
-  /* Stop the currently running timer */
-  if (timers->current != visc_TimerID_NONE) {
-    struct visc_SubTimerList *subtimerlist = timers->sub_timer_list[timers->current];
-    struct visc_SubTimer *currSubTimer = (subtimerlist != NULL) ? subtimerlist->current : NULL;
-
-    if (!is_async(timers->current) ) {
-      if (timers->current != timer) {
-        if (currSubTimer != NULL) {
-          visc_StopTimerAndSubTimer(&timers->timers[timers->current], &currSubTimer->timer);
-        } else {
-          visc_StopTimer(&timers->timers[timers->current]);
-        }
-      } else {
-        if (currSubTimer != NULL) {
-          visc_StopTimer(&currSubTimer->timer);
-        }
-      }
-    } else {
-      insert_marker(timers, timer);
-      if (!is_async(timer)) { // if switching to async too, keep driver going
-        visc_StopTimer(&timers->timers[visc_TimerID_DRIVER]);
-      }
-    }
-  }
-
-  visc_Timestamp currentTime = get_time();
-
-  /* The only cases in which we check for asynchronous task completion are
-   * when an overlapping CPU operation completes, or the next
-   * segment blocks on completion of previous async operations */
-  if( asyncs_outstanding(timers) &&
-      (!is_async(timers->current) || is_blocking(timer) ) ) {
-
-    struct visc_async_time_marker_list * last_event = get_last_async(timers);
-    /* CL_COMPLETE if completed */
-
-    cl_int ciErrNum = CL_SUCCESS;
-    cl_int async_done = CL_COMPLETE;
-
-    ciErrNum = clGetEventInfo(*((cl_event *)last_event->marker), CL_EVENT_COMMAND_EXECUTION_STATUS, sizeof(cl_int), &async_done, NULL);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stdout, "Error Querying EventInfo1!\n");
-    }
-
-
-    if(is_blocking(timer)) {
-      /* Async operations completed after previous CPU operations:
-       * overlapped time is the total CPU time since this set of async
-       * operations were first issued */
-
-      // timer to switch to is COPY or NONE
-      if(async_done != CL_COMPLETE) {
-        accumulate_time(&(timers->timers[visc_TimerID_OVERLAP].elapsed),
-	                  timers->async_begin,currentTime);
-      }
-
-      /* Wait on async operation completion */
-      ciErrNum = clWaitForEvents(1, (cl_event *)last_event->marker);
-      if (ciErrNum != CL_SUCCESS) {
-        fprintf(stderr, "Error Waiting for Events!\n");
-      }
-
-      visc_Timestamp total_async_time = record_async_times(timers);
-
-      /* Async operations completed before previous CPU operations:
-       * overlapped time is the total async time */
-      if(async_done == CL_COMPLETE) {
-        //fprintf(stderr, "Async_done: total_async_type = %lld\n", total_async_time);
-        timers->timers[visc_TimerID_OVERLAP].elapsed += total_async_time;
-      }
-
-    } else
-    /* implies (!is_async(timers->current) && asyncs_outstanding(timers)) */
-    // i.e. Current Not Async (not KERNEL/COPY_ASYNC) but there are outstanding
-    // so something is deeper in stack
-    if(async_done == CL_COMPLETE ) {
-      /* Async operations completed before previous CPU operations:
-       * overlapped time is the total async time */
-      timers->timers[visc_TimerID_OVERLAP].elapsed += record_async_times(timers);
-    }
-  }
-
-  /* Start the new timer */
-  if (timer != visc_TimerID_NONE) {
-    if(!is_async(timer)) {
-      visc_StartTimer(&timers->timers[timer]);
-    } else {
-      // toSwitchTo Is Async (KERNEL/COPY_ASYNC)
-      if (!asyncs_outstanding(timers)) {
-        /* No asyncs outstanding, insert a fresh async marker */
-
-        insert_marker(timers, timer);
-        timers->async_begin = currentTime;
-      } else if(!is_async(timers->current)) {
-        /* Previous asyncs still in flight, but a previous SwitchTo
-         * already marked the end of the most recent async operation,
-         * so we can rename that marker as the beginning of this async
-         * operation */
-
-        struct visc_async_time_marker_list * last_event = get_last_async(timers);
-        last_event->label = NULL;
-        last_event->timerID = timer;
-      }
-      if (!is_async(timers->current)) {
-        visc_StartTimer(&timers->timers[visc_TimerID_DRIVER]);
-      }
-    }
-  }
-  timers->current = timer;
-
-}
-
-void
-visc_SwitchToSubTimer(struct visc_TimerSet *timers, char *label, enum visc_TimerID category)
-{
-  struct visc_SubTimerList *subtimerlist = timers->sub_timer_list[timers->current];
-  struct visc_SubTimer *curr = (subtimerlist != NULL) ? subtimerlist->current : NULL;
-
-  if (timers->current != visc_TimerID_NONE) {
-    if (!is_async(timers->current) ) {
-      if (timers->current != category) {
-        if (curr != NULL) {
-          visc_StopTimerAndSubTimer(&timers->timers[timers->current], &curr->timer);
-        } else {
-          visc_StopTimer(&timers->timers[timers->current]);
-        }
-      } else {
-        if (curr != NULL) {
-          visc_StopTimer(&curr->timer);
-        }
-      }
-    } else {
-      insert_submarker(timers, label, category);
-      if (!is_async(category)) { // if switching to async too, keep driver going
-        visc_StopTimer(&timers->timers[visc_TimerID_DRIVER]);
-      }
-    }
-  }
-
-  visc_Timestamp currentTime = get_time();
-
-  /* The only cases in which we check for asynchronous task completion are
-   * when an overlapping CPU operation completes, or the next
-   * segment blocks on completion of previous async operations */
-  if( asyncs_outstanding(timers) &&
-      (!is_async(timers->current) || is_blocking(category) ) ) {
-
-    struct visc_async_time_marker_list * last_event = get_last_async(timers);
-    /* CL_COMPLETE if completed */
-
-    cl_int ciErrNum = CL_SUCCESS;
-    cl_int async_done = CL_COMPLETE;
-
-    ciErrNum = clGetEventInfo(*((cl_event *)last_event->marker), CL_EVENT_COMMAND_EXECUTION_STATUS, sizeof(cl_int), &async_done, NULL);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stdout, "Error Querying EventInfo2!\n");
-    }
-
-    if(is_blocking(category)) {
-      /* Async operations completed after previous CPU operations:
-       * overlapped time is the total CPU time since this set of async
-       * operations were first issued */
-
-      // timer to switch to is COPY or NONE
-      // if it hasn't already finished, then just take now and use that as the elapsed time in OVERLAP
-      // anything happening after now isn't OVERLAP because everything is being stopped to wait for synchronization
-      // it seems that the extra sync wall time isn't being recorded anywhere
-      if(async_done != CL_COMPLETE)
-        accumulate_time(&(timers->timers[visc_TimerID_OVERLAP].elapsed),
-	                  timers->async_begin,currentTime);
-
-      /* Wait on async operation completion */
-      ciErrNum = clWaitForEvents(1, (cl_event *)last_event->marker);
-      if (ciErrNum != CL_SUCCESS) {
-        fprintf(stderr, "Error Waiting for Events!\n");
-      }
-      visc_Timestamp total_async_time = record_async_times(timers);
-
-      /* Async operations completed before previous CPU operations:
-       * overlapped time is the total async time */
-       // If it did finish, then accumulate all the async time that did happen into OVERLAP
-       // the immediately preceding EventSynchronize theoretically didn't have any effect since it was already completed.
-      if(async_done == CL_COMPLETE /*cudaSuccess*/)
-        timers->timers[visc_TimerID_OVERLAP].elapsed += total_async_time;
-
-    } else
-    /* implies (!is_async(timers->current) && asyncs_outstanding(timers)) */
-    // i.e. Current Not Async (not KERNEL/COPY_ASYNC) but there are outstanding
-    // so something is deeper in stack
-    if(async_done == CL_COMPLETE /*cudaSuccess*/) {
-      /* Async operations completed before previous CPU operations:
-       * overlapped time is the total async time */
-      timers->timers[visc_TimerID_OVERLAP].elapsed += record_async_times(timers);
-    }
-    // else, this isn't blocking, so just check the next time around
-  }
-
-  subtimerlist = timers->sub_timer_list[category];
-  struct visc_SubTimer *subtimer = NULL;
-
-  if (label != NULL) {
-    subtimer = subtimerlist->subtimer_list;
-    while (subtimer != NULL) {
-      if (strcmp(subtimer->label, label) == 0) {
-        break;
-      } else {
-        subtimer = subtimer->next;
-      }
-    }
-  }
-
-  /* Start the new timer */
-  if (category != visc_TimerID_NONE) {
-    if(!is_async(category)) {
-      if (subtimerlist != NULL) {
-        subtimerlist->current = subtimer;
-      }
-
-      if (category != timers->current && subtimer != NULL) {
-        visc_StartTimerAndSubTimer(&timers->timers[category], &subtimer->timer);
-      } else if (subtimer != NULL) {
-        visc_StartTimer(&subtimer->timer);
-      } else {
-        visc_StartTimer(&timers->timers[category]);
-      }
-    } else {
-      if (subtimerlist != NULL) {
-        subtimerlist->current = subtimer;
-      }
-
-      // toSwitchTo Is Async (KERNEL/COPY_ASYNC)
-      if (!asyncs_outstanding(timers)) {
-        /* No asyncs outstanding, insert a fresh async marker */
-        insert_submarker(timers, label, category);
-        timers->async_begin = currentTime;
-      } else if(!is_async(timers->current)) {
-        /* Previous asyncs still in flight, but a previous SwitchTo
-         * already marked the end of the most recent async operation,
-         * so we can rename that marker as the beginning of this async
-         * operation */
-
-        struct visc_async_time_marker_list * last_event = get_last_async(timers);
-        last_event->timerID = category;
-        last_event->label = label;
-      } // else, marker for switchToThis was already inserted
-
-      // toSwitchTo is already asynchronous, but if the current/previous state is async too, then DRIVER is already running
-      if (!is_async(timers->current)) {
-        visc_StartTimer(&timers->timers[visc_TimerID_DRIVER]);
-      }
-    }
-  }
-
-  timers->current = category;
-}
-
-void
-visc_PrintTimerSet(struct visc_TimerSet *timers)
-{
-  visc_Timestamp wall_end = get_time();
-
-  struct visc_Timer *t = timers->timers;
-  struct visc_SubTimer* sub = NULL;
-
-  int maxSubLength;
-
-  const char *categories[] = {
-    "IO", "Kernel", "Copy", "Driver", "Copy Async", "Compute", "Overlap",
-    "Init_Ctx", "Clear_Ctx", "Copy_Scalar", "Copy_Ptr", "Mem_Free",
-    "Read_Output", "Setup", "Mem_Track", "Mem_Untrack", "Misc",
-    "Pthread_Create", "Arg_Pack", "Arg_Unpack", "Computation", "Output_Pack", "Output_Unpack"
-
-  };
-
-  const int maxCategoryLength = 20;
-
-  int i;
-  for(i = 1; i < visc_TimerID_LAST; ++i) { // exclude NONE and OVERLAP from this format
-    if(visc_GetElapsedTime(&t[i]) != 0 || true) {
-
-      // Print Category Timer
-      printf("%-*s: %.9f\n", maxCategoryLength, categories[i-1], visc_GetElapsedTime(&t[i]));
-
-      if (timers->sub_timer_list[i] != NULL) {
-        sub = timers->sub_timer_list[i]->subtimer_list;
-        maxSubLength = 0;
-        while (sub != NULL) {
-          // Find longest SubTimer label
-          if (strlen(sub->label) > (unsigned long) maxSubLength) {
-            maxSubLength = strlen(sub->label);
-          }
-          sub = sub->next;
-        }
-
-        // Fit to Categories
-        if (maxSubLength <= maxCategoryLength) {
-         maxSubLength = maxCategoryLength;
-        }
-
-        sub = timers->sub_timer_list[i]->subtimer_list;
-
-        // Print SubTimers
-        while (sub != NULL) {
-          printf(" -%-*s: %.9f\n", maxSubLength, sub->label, visc_GetElapsedTime(&sub->timer));
-          sub = sub->next;
-        }
-      }
-    }
-  }
-
-  if(visc_GetElapsedTime(&t[visc_TimerID_OVERLAP]) != 0)
-    printf("CPU/Kernel Overlap: %.9f\n", visc_GetElapsedTime(&t[visc_TimerID_OVERLAP]));
-
-  float walltime = (wall_end - timers->wall_begin)/ 1e9;
-  printf("Timer Wall Time: %.9f\n", walltime);
-
-}
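-
-// For reference, a hypothetical fragment of the output produced above
-// (category names padded to 20 columns, times printed with 9 decimals;
-// the names and values are illustrative only):
-//
-//   Kernel              : 0.123456789
-//    -conv_layer          : 0.100000000
-//   Timer Wall Time: 1.234567890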
-
-void visc_DestroyTimerSet(struct visc_TimerSet * timers)
-{
-  /* clean up all of the async event markers */
-  struct visc_async_time_marker_list* event = timers->async_markers;
-  while(event != NULL) {
-
-    cl_int ciErrNum = clWaitForEvents(1, (cl_event *)(event)->marker);
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Waiting for Events!\n");
-    }
-
-    ciErrNum = clReleaseEvent( *((cl_event *)(event)->marker) );
-    if (ciErrNum != CL_SUCCESS) {
-      fprintf(stderr, "Error Release Events!\n");
-    }
-
-    free((event)->marker);
-    struct visc_async_time_marker_list* next = ((event)->next);
-
-    free(event);
-
-    // (*event) = NULL;
-    event = next;
-  }
-
-  int i = 0;
-  for(i = 0; i < visc_TimerID_LAST; ++i) {
-    if (timers->sub_timer_list[i] != NULL) {
-      struct visc_SubTimer *subtimer = timers->sub_timer_list[i]->subtimer_list;
-      struct visc_SubTimer *prev = NULL;
-      while (subtimer != NULL) {
-        free(subtimer->label);
-        prev = subtimer;
-        subtimer = subtimer->next;
-        free(prev);
-      }
-      free(timers->sub_timer_list[i]);
-    }
-  }
-}
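-
-// Illustrative lifecycle sketch tying the timer API together; assumes
-// visc_InitializeTimerSet from VISCTimer.h and a hypothetical workload:
-#if 0 // example only, not compiled
-static void example_timer_lifecycle() {
-  struct visc_TimerSet ts;
-  visc_InitializeTimerSet(&ts);                       // set wall_begin, zero timers
-  visc_SwitchToTimer(&ts, visc_TimerID_COMPUTATION);  // time a region
-  /* ... workload ... */
-  visc_SwitchToTimer(&ts, visc_TimerID_NONE);
-  visc_PrintTimerSet(&ts);                            // dump per-category times
-  visc_DestroyTimerSet(&ts);                          // release markers/subtimers
-}
-#endif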
-
-/**************************** Pipeline API ************************************/
-#define BUFFER_SIZE 1
-
-// Launch API for a streaming dataflow graph
-void* llvm_visc_streamLaunch(void(*LaunchFunc)(void*, void*), void* args) {
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) malloc(sizeof(DFNodeContext_X86));
-
-  Context->threads = new std::vector<pthread_t>();
-  Context->ArgInPortSizeMap = new std::map<unsigned, uint64_t>();
-  //Context->BindInSizes = new std::vector<uint64_t>();
-  Context->BindInSourcePort = new std::vector<unsigned>();
-  Context->BindOutSizes = new std::vector<uint64_t>();
-  Context->EdgeSizes = new std::vector<uint64_t>();
-  Context->BindInputBuffers = new std::vector<CircularBuffer<uint64_t>*>();
-  Context->BindOutputBuffers = new std::vector<CircularBuffer<uint64_t>*>();
-  Context->EdgeBuffers = new std::vector<CircularBuffer<uint64_t>*>();
-  Context->isLastInputBuffers = new std::vector<CircularBuffer<uint64_t>*>();
-
-  DEBUG(cout << "StreamLaunch -- Graph: " << Context << ", Arguments: " << args << flush << "\n");
-  LaunchFunc(args, Context);
-  return Context;
-}
-
-// Push API for a streaming dataflow graph
-void llvm_visc_streamPush(void* graphID, void* args) {
-  DEBUG(cout << "StreamPush -- Graph: " << graphID << ", Arguments: " << args << flush << "\n");
-  DFNodeContext_X86* Ctx = (DFNodeContext_X86*) graphID;
-  unsigned offset = 0;
-  // ArgInPortSizeMap is keyed by input-port index and assumed dense (0..n-1).
-  for (unsigned i=0; i< Ctx->ArgInPortSizeMap->size(); i++) {
-    uint64_t element;
-    memcpy(&element, (char*)args+offset, Ctx->ArgInPortSizeMap->at(i));
-    offset += Ctx->ArgInPortSizeMap->at(i);
-    for(unsigned j=0; j<Ctx->BindInputBuffers->size();j++) {
-      if(Ctx->BindInSourcePort->at(j) == i) {
-        // Push to all bind buffers connected to parent node at this port
-        //DEBUG(cout << "\tPushing Value " << element << " to buffer\n");
-        llvm_visc_bufferPush(Ctx->BindInputBuffers->at(j), element);
-      }
-    }
-  }
-  // Push 0 in isLastInput buffers of all child nodes
-  for (CircularBuffer<uint64_t>* buffer: *(Ctx->isLastInputBuffers))
-    llvm_visc_bufferPush(buffer, 0);
-}
-
-// Pop API for a streaming dataflow graph
-void* llvm_visc_streamPop(void* graphID) {
-  DEBUG(cout << "StreamPop -- Graph: " << graphID << flush << "\n");
-  DFNodeContext_X86* Ctx = (DFNodeContext_X86*) graphID;
-  unsigned totalBytes = 0;
-  for(uint64_t size:  *(Ctx->BindOutSizes))
-    totalBytes+= size;
-  void* output = malloc(totalBytes);
-  unsigned offset = 0;
-  for (unsigned i=0; i< Ctx->BindOutputBuffers->size(); i++) {
-    uint64_t element = llvm_visc_bufferPop(Ctx->BindOutputBuffers->at(i));
-    //DEBUG(cout << "\tPopped Value " << element << " from buffer\n");
-    memcpy((char*)output+offset, &element, Ctx->BindOutSizes->at(i));
-    offset += Ctx->BindOutSizes->at(i);
-  }
-  return output;
-}
-
-// Wait API for a streaming dataflow graph
-void llvm_visc_streamWait(void* graphID) {
-  DEBUG(cout << "StreamWait -- Graph: " << graphID << flush << "\n");
-  DFNodeContext_X86* Ctx = (DFNodeContext_X86*) graphID;
-  // Push garbage to all other input buffers
-  for (unsigned i=0; i< Ctx->BindInputBuffers->size(); i++) {
-    uint64_t element = 0;
-    //DEBUG(cout << "\tPushing Value " << element << " to buffer\n");
-    llvm_visc_bufferPush(Ctx->BindInputBuffers->at(i), element);
-  }
-  // Push 1 in isLastInput buffers of all child nodes
-  for (unsigned i=0; i < Ctx->isLastInputBuffers->size(); i++)
-    llvm_visc_bufferPush(Ctx->isLastInputBuffers->at(i), 1);
-
-  llvm_visc_freeThreads(graphID);
-}
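-
-// Illustrative driver sketch for the four streaming calls above. The launch
-// function and argument block are hypothetical; in practice both are
-// generated by the VISC compiler.
-#if 0 // example only, not compiled
-static void example_stream_usage(void (*launchFn)(void*, void*), void* args,
-                                 unsigned numItems) {
-  void* graph = llvm_visc_streamLaunch(launchFn, args); // spawn node threads
-  for (unsigned i = 0; i < numItems; i++) {
-    llvm_visc_streamPush(graph, args);        // feed one input item
-    void* out = llvm_visc_streamPop(graph);   // block for one output item
-    free(out);                                // streamPop mallocs the result
-  }
-  llvm_visc_streamWait(graph);                // signal end-of-input and join
-}
-#endif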
-
-// Create a buffer and return the bufferID
-void* llvm_visc_createBindInBuffer(void* graphID, uint64_t size, unsigned inArgPort) {
-  DEBUG(cout << "Create BindInBuffer -- Graph: " << graphID << ", Size: " << size << flush << "\n");
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  CircularBuffer<uint64_t> *bufferID = new CircularBuffer<uint64_t>(BUFFER_SIZE, "BindIn");
-  DEBUG(cout << "\tNew Buffer: " << bufferID << flush << "\n");
-  Context->BindInputBuffers->push_back(bufferID);
-  (*(Context->ArgInPortSizeMap))[inArgPort] = size;
-  Context->BindInSourcePort->push_back(inArgPort);
-  //Context->BindInSizes->push_back(size);
-  return bufferID;
-}
-
-void* llvm_visc_createBindOutBuffer(void* graphID, uint64_t size) {
-  DEBUG(cout << "Create BindOutBuffer -- Graph: " << graphID << ", Size: " << size << flush << "\n");
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  //Twine name = Twine("Bind.Out.")+Twine(Context->BindOutputBuffers->size());
-  CircularBuffer<uint64_t> *bufferID = new CircularBuffer<uint64_t>(BUFFER_SIZE, "BindOut");
-  DEBUG(cout << "\tNew Buffer: " << bufferID << flush << "\n");
-  Context->BindOutputBuffers->push_back(bufferID);
-  Context->BindOutSizes->push_back(size);
-  return bufferID;
-}
-void* llvm_visc_createEdgeBuffer(void* graphID, uint64_t size) {
-  DEBUG(cout << "Create EdgeBuffer -- Graph: " << graphID << ", Size: " << size << flush << "\n");
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  //Twine name = Twine("Edge.")+Twine(Context->EdgeBuffers->size());
-  CircularBuffer<uint64_t> *bufferID = new CircularBuffer<uint64_t>(BUFFER_SIZE, "Edge");
-  DEBUG(cout << "\tNew Buffer: " << bufferID << flush << "\n");
-  Context->EdgeBuffers->push_back(bufferID);
-  Context->EdgeSizes->push_back(size);
-  return bufferID;
-}
-
-void* llvm_visc_createLastInputBuffer(void* graphID, uint64_t size) {
-  DEBUG(cout << "Create isLastInputBuffer -- Graph: " << graphID << ", Size: " << size << flush << "\n");
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  //Twine name = Twine("isLastInput.")+Twine(Context->EdgeBuffers->size());
-  CircularBuffer<uint64_t> *bufferID = new CircularBuffer<uint64_t>(BUFFER_SIZE, "LastInput");
-  DEBUG(cout << "\tNew Buffer: " << bufferID << flush << "\n");
-  Context->isLastInputBuffers->push_back(bufferID);
-  return bufferID;
-}
-
-// Free all buffers associated with a streaming graph
-void llvm_visc_freeBuffers(void* graphID) {
-  DEBUG(cout << "Free all buffers -- Graph: " << graphID << flush << "\n");
-  DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  for(CircularBuffer<uint64_t>* bufferID: *(Context->BindInputBuffers))
-    delete bufferID;
-  for(CircularBuffer<uint64_t>* bufferID: *(Context->BindOutputBuffers))
-    delete bufferID;
-  for(CircularBuffer<uint64_t>* bufferID: *(Context->EdgeBuffers))
-    delete bufferID;
-  for(CircularBuffer<uint64_t>* bufferID: *(Context->isLastInputBuffers))
-    delete bufferID;
-}
-
-// Pop an element from the buffer
-uint64_t llvm_visc_bufferPop(void* bufferID) {
-  CircularBuffer<uint64_t>* buffer = (CircularBuffer<uint64_t>*) bufferID;
-  return buffer->pop();
-}
-
-// Push an element into the buffer
-void llvm_visc_bufferPush(void* bufferID, uint64_t element) {
-  CircularBuffer<uint64_t>* buffer = (CircularBuffer<uint64_t>*) bufferID;
-  buffer->push(element);
-}
-
-// Create a thread
-void llvm_visc_createThread(void* graphID, void* (*Func)(void*), void* arguments) {
-  DEBUG(cout << "Create Thread -- Graph: " << graphID << ", Func: " << Func << ", Args: " << arguments << flush << "\n");
-  DFNodeContext_X86* Ctx = (DFNodeContext_X86*) graphID;
-  int err;
-  pthread_t threadID;
-  if((err = pthread_create(&threadID, NULL, Func, arguments)) != 0)
-    cout << "Failed to create thread. Error code = " << err << flush << "\n";
-
-  Ctx->threads->push_back(threadID);
-}
-
-// Wait for thread to finish
-void llvm_visc_freeThreads(void* graphID) {
-  DEBUG(cout << "Free Threads -- Graph: " << graphID << flush << "\n");
-  DFNodeContext_X86* Ctx = (DFNodeContext_X86*) graphID;
-  for(pthread_t thread: *(Ctx->threads))
-    pthread_join(thread, NULL);
-}
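-
-// Illustrative node-body sketch: a generated dataflow node pops from its
-// input buffer, computes, and pushes to its output buffer until the
-// isLastInput flag (pushed by streamWait) arrives. The argument struct and
-// the computation are hypothetical.
-#if 0 // example only, not compiled
-struct ExampleNodeArgs { void* inBuf; void* outBuf; void* lastBuf; };
-static void* example_node_func(void* a) {
-  ExampleNodeArgs* A = (ExampleNodeArgs*) a;
-  while (llvm_visc_bufferPop(A->lastBuf) == 0) { // 0 = more input, 1 = done
-    uint64_t x = llvm_visc_bufferPop(A->inBuf);
-    llvm_visc_bufferPush(A->outBuf, x + 1);      // stand-in computation
-  }
-  return NULL;
-}
-#endif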
-
-/************************ OPENCL & PTHREAD API ********************************/
-
-void* llvm_visc_x86_launch(void* (*rootFunc)(void*), void* arguments) {
-  DFNodeContext_X86 *Context = (DFNodeContext_X86*) malloc(sizeof(DFNodeContext_X86));
-  //int err;
-  //if((err = pthread_create(&Context->threadID, NULL, rootFunc, arguments)) != 0)
-    //cout << "Failed to create pthread. Error code = " << err << flush << "\n";
-  rootFunc(arguments);
-  return Context;
-}
-
-void llvm_visc_x86_wait(void* graphID) {
-  DEBUG(cout << "Waiting for pthread to finish ...\n");
-  //DFNodeContext_X86* Context = (DFNodeContext_X86*) graphID;
-  //pthread_join(Context->threadID, NULL);
-  free(graphID);
-  DEBUG(cout << "\t... pthread Done!\n");
-}
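-
-// Minimal usage sketch for the launch/wait pair above (the root function and
-// its arguments are hypothetical; launch currently runs the root
-// synchronously and wait just frees the context):
-#if 0 // example only, not compiled
-static void example_x86_usage(void* (*root)(void*), void* args) {
-  void* graph = llvm_visc_x86_launch(root, args);
-  llvm_visc_x86_wait(graph);
-}
-#endif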
-
-void* llvm_visc_ocl_initContext(enum visc::Target T) {
-  pthread_mutex_lock(&ocl_mtx);
-  //DEBUG(std::string Target = T == visc::GPU_TARGET? "GPU" : "SPIR");
-  DEBUG(const char* Target = T == visc::GPU_TARGET? "GPU" : "SPIR");
-  DEBUG(cout << "Initializing Context for " << Target << " device\n");
-  cl_uint numPlatforms;
-  cl_int errcode;
-  errcode = clGetPlatformIDs(0, NULL, &numPlatforms);
-  checkErr(errcode, CL_SUCCESS, "Failure to get number of platforms");
-
-  // now get all the platform IDs
-  cl_platform_id* platforms = (cl_platform_id*) malloc(sizeof(cl_platform_id)*numPlatforms);
-  errcode = clGetPlatformIDs(numPlatforms, platforms, NULL);
-  checkErr(errcode, CL_SUCCESS, "Failure to get platform IDs");
-
-
-  for(unsigned i=0; i < numPlatforms; i++) {
-    char buffer[10240];
-    DEBUG(cout << "Device " << i << " Info -->\n");
-    clGetPlatformInfo(platforms[i], CL_PLATFORM_PROFILE, 10240, buffer, NULL);
-    DEBUG(cout << "\tPROFILE = " << buffer << flush << "\n");
-    clGetPlatformInfo(platforms[i], CL_PLATFORM_VERSION, 10240, buffer, NULL);
-    DEBUG(cout << "\tVERSION = "<< buffer << flush << "\n");
-    clGetPlatformInfo(platforms[i], CL_PLATFORM_NAME, 10240, buffer, NULL);
-    DEBUG(cout << "\tNAME = " << buffer << flush << "\n");
-    clGetPlatformInfo(platforms[i], CL_PLATFORM_VENDOR, 10240, buffer, NULL);
-    DEBUG(cout << "\tVENDOR = " << buffer << flush << "\n");
-    clGetPlatformInfo(platforms[i], CL_PLATFORM_EXTENSIONS, 10240, buffer, NULL);
-    DEBUG(cout << "\tEXTENSIONS = " << buffer << flush << "\n");
-  }
-  // set platform property - just pick the first one
-  //cl_context_properties properties[] = {CL_CONTEXT_PLATFORM,
-                                        //(long) platforms[0],
-                                        //0};
-  //globalOCLContext = clCreateContextFromType(properties, CL_DEVICE_TYPE_GPU,
-                                         //NULL, NULL, &errcode);
-  assert(numPlatforms >= 2 && "Expecting at least two OpenCL platforms");
-  // Pick the first platform for the GPU target, the second (x86 AVX CPU) otherwise
-  cl_context_properties properties[] = {CL_CONTEXT_PLATFORM,
-                                        (long) platforms[T == visc::GPU_TARGET? 0 : 1],
-                                        0};
-  globalOCLContext = clCreateContextFromType(properties,
-                                            T == visc::GPU_TARGET?
-                                              CL_DEVICE_TYPE_GPU : CL_DEVICE_TYPE_CPU,
-                                       NULL, NULL, &errcode);
-  // get the list of OCL devices associated with context
-  size_t dataBytes;
-  errcode = clGetContextInfo(globalOCLContext, CL_CONTEXT_DEVICES, 0,
-                              NULL, &dataBytes);
-  checkErr(errcode, CL_SUCCESS, "Failure to get context info length");
-
-  clDevices = (cl_device_id *) malloc(dataBytes);
-  errcode |= clGetContextInfo(globalOCLContext, CL_CONTEXT_DEVICES, dataBytes,
-                              clDevices, NULL);
-  checkErr(errcode, CL_SUCCESS, "Failure to get context info");
-  // Sub-device partitioning is intentionally disabled (note the `false &&`);
-  // kept for reference.
-  if(false && T == visc::SPIR_TARGET) {
-    cl_device_partition_property props[4];
-    props[0] = CL_DEVICE_PARTITION_BY_COUNTS;
-    props[1] = NUM_CORES;
-    props[2] = CL_DEVICE_PARTITION_BY_COUNTS_LIST_END;
-    props[3] = 0;
-    cl_device_id subdevice_id[8];
-    cl_uint num_entries = 8;
-
-    cl_uint numDevices;
-    clCreateSubDevices(clDevices[0], props, num_entries, subdevice_id, &numDevices);
-    //printf("Num of devices = %d\n", numDevices);
-    //for(unsigned i =0 ; i< numDevices; i++)
-      //printf("Subdevice id %d = %p\n", i, subdevice_id[i]);
-    clDevices[0] = subdevice_id[0];
-    globalOCLContext = clCreateContext(properties, 1, clDevices, NULL, NULL, &errcode);
-    checkErr(errcode, CL_SUCCESS, "Failure to create OCL context");
-  }
-
-  free(platforms);
-  DEBUG(cout << "\tContext " << globalOCLContext << flush << "\n");
-  checkErr(errcode, CL_SUCCESS, "Failure to create OCL context");
-
-  DEBUG(cout << "Initialize Kernel Timer\n");
-  visc_InitializeTimerSet(&kernel_timer);
-
-  pthread_mutex_unlock(&ocl_mtx);
-  return globalOCLContext;
-}
-
-void llvm_visc_ocl_clearContext(void* graphID) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Clear Context\n");
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  // FIXME: Have separate function to release command queue and clear context.
-  // Would be useful when a context has multiple command queues
-  clReleaseKernel(Context->clKernel);
-  //clReleaseProgram(Context->clProgram);
-  //clReleaseCommandQueue(Context->clCommandQue);
-  //clReleaseContext(globalOCLContext);
-  //DEBUG(cout << "Released context at: " << globalOCLContext);
-  free(Context);
-  DEBUG(cout << "Done with OCL kernel\n");
-  cout << "Printing VISC Timer: KernelTimer\n";
-  visc_PrintTimerSet(&kernel_timer);
-  pthread_mutex_unlock(&ocl_mtx);
-
-}
-
-void llvm_visc_ocl_argument_shared(void* graphID, int arg_index, size_t size) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Set Shared Memory Input:");
-  DEBUG(cout << "\tArgument Index = " << arg_index << ", Size = " << size << flush << "\n");
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  DEBUG(cout << "Using Context: " << Context << flush << "\n");
-  DEBUG(cout << "Using clKernel: " << Context->clKernel << flush << "\n");
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_int errcode = clSetKernelArg(Context->clKernel, arg_index, size, NULL);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to set shared memory argument");
-  pthread_mutex_unlock(&ocl_mtx);
-}
-
-void llvm_visc_ocl_argument_scalar(void* graphID, void* input, int arg_index, size_t size) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Set Scalar Input:");
-  DEBUG(cout << "\tArgument Index = " << arg_index << ", Size = " << size << flush << "\n");
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  DEBUG(cout << "Using Context: " << Context << flush << "\n");
-  DEBUG(cout << "Using clKernel: " << Context->clKernel << flush << "\n");
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_int errcode = clSetKernelArg(Context->clKernel, arg_index, size, input);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to set constant input argument");
-  pthread_mutex_unlock(&ocl_mtx);
-}
-
-void* llvm_visc_ocl_argument_ptr(void* graphID, void* input, int arg_index, size_t size, bool isInput, bool isOutput) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Set Pointer Input:");
-  DEBUG(cout << "\tArgument Index = " << arg_index << ", Ptr = " << input << ", Size = "<< size << flush << "\n");
-  // Size should be non-zero
-  assert(size != 0 && "Size of data pointed to has to be non-zero!");
-  DEBUG(cout << "\tInput = "<< isInput << "\tOutput = " << isOutput << flush << "\n");
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  
-  pthread_mutex_unlock(&ocl_mtx);
-  // Check with runtime the location of this memory 
-  cl_mem d_input = (cl_mem) llvm_visc_ocl_request_mem(input, size, Context, isInput, isOutput);
-  
-  pthread_mutex_lock(&ocl_mtx);
-  // Set Kernel Argument
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_int errcode = clSetKernelArg(Context->clKernel, arg_index, sizeof(cl_mem), (void*)&d_input);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to set pointer argument");
-  DEBUG(cout << "\tDevicePtr = " << d_input << flush << "\n");
-  pthread_mutex_unlock(&ocl_mtx);
-  return d_input;
-}
-
-void* llvm_visc_ocl_output_ptr(void* graphID, int arg_index, size_t size) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Set device memory for Output Struct:");
-  DEBUG(cout << "\tArgument Index = " << arg_index << ", Size = "<< size << flush << "\n");
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  cl_int errcode;
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_mem d_output = clCreateBuffer(Context->clOCLContext, CL_MEM_WRITE_ONLY, size, NULL, &errcode);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to create output buffer on device");
-  //pthread_mutex_lock(&ocl_mtx);
-  errcode = clSetKernelArg(Context->clKernel, arg_index, sizeof(cl_mem), (void*)&d_output);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to set pointer argument");
-  DEBUG(cout << "\tDevicePtr = " << d_output << flush << "\n");
-  pthread_mutex_unlock(&ocl_mtx);
-  return d_output;
-}
-
-void llvm_visc_ocl_free(void* ptr) {
-  //DEBUG(cout << "Release Device Pointer: " << ptr << flush << "\n");
-  //cl_mem d_ptr = (cl_mem) ptr;
-  //clReleaseMemObject(d_ptr);
-}
-
-void* llvm_visc_ocl_getOutput(void* graphID, void* h_output, void* d_output, size_t size) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Get Output:\n");
-  DEBUG(cout << "\tHostPtr = " << h_output << ", DevicePtr = " << d_output << ", Size = "<< size << flush << "\n");
-  if(h_output == NULL)
-    h_output = malloc(size);
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  //pthread_mutex_lock(&ocl_mtx);
-  cl_int errcode = clEnqueueReadBuffer(Context->clCommandQue, (cl_mem)d_output, CL_TRUE, 0, size,
-                                h_output, 0, NULL, NULL);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "[getOutput] Failure to read output");
-  pthread_mutex_unlock(&ocl_mtx);
-  return h_output;
-}
-
-void* llvm_visc_ocl_executeNode(void* graphID, unsigned workDim,
-                                const size_t* localWorkSize, const size_t* globalWorkSize) {
-  pthread_mutex_lock(&ocl_mtx);
-
-  size_t GlobalWG[3];
-  size_t LocalWG[3];
-
-  // clEnqueueNDRangeKernel segfaults if we pass the caller's local and
-  // global work-group arguments directly, so copy them into
-  // stack-allocated arrays first.
-  for(unsigned i=0; i<workDim; i++) {
-    GlobalWG[i] = globalWorkSize[i];
-  }
-
-  // OpenCL allows local workgroup to be null.
-  if(localWorkSize != NULL) {
-    for(unsigned i=0; i<workDim; i++) {
-      LocalWG[i] = localWorkSize[i];
-    }
-  }
-
-  DFNodeContext_OCL* Context = (DFNodeContext_OCL*) graphID;
-  // TODO: We would like to use an event for better kernel scheduling, but
-  // currently passing the event parameter to clEnqueueNDRangeKernel results
-  // in a segfault.
-  cl_event* event = NULL; // returned unused; see TODO above
-  DEBUG(cout << "Enqueuing kernel:\n");
-  DEBUG(cout << "\tCommand Queue: " << Context->clCommandQue << flush << "\n");
-  DEBUG(cout << "\tKernel: " << Context->clKernel << flush << "\n");
-  DEBUG(cout << "\tNumber of dimensions: " << workDim << flush << "\n");
-  DEBUG(cout << "\tGlobal Work Group: ( ");
-  for(unsigned i = 0; i<workDim; i++) {
-    DEBUG(cout << GlobalWG[i] << " ");
-  }
-  DEBUG(cout << ")\n");
-  if(localWorkSize != NULL) {
-    DEBUG(cout << "\tLocal Work Group: ( ");
-    for(unsigned i = 0; i<workDim; i++) {
-      DEBUG(cout << LocalWG[i] << " ");
-    }
-    DEBUG(cout << ")\n");
-  }
-  //pthread_mutex_lock(&ocl_mtx);
-  clFinish(Context->clCommandQue);
-  //pthread_mutex_unlock(&ocl_mtx);
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_COMPUTATION);
-  //for(int i=0 ;i < NUM_TESTS; i++) {
-    //cout << "Iteration = " << i << flush << "\n";
-    //pthread_mutex_lock(&ocl_mtx);
-    cl_int errcode = clEnqueueNDRangeKernel(Context->clCommandQue,
-        Context->clKernel, workDim, NULL, GlobalWG, (localWorkSize == NULL)? NULL :  LocalWG, 0, NULL, NULL);
-    //pthread_mutex_unlock(&ocl_mtx);
-    checkErr(errcode, CL_SUCCESS, "Failure to enqueue kernel");
-  //}
-  //pthread_mutex_lock(&ocl_mtx);
-  clFinish(Context->clCommandQue);
-  //pthread_mutex_unlock(&ocl_mtx);
-  visc_SwitchToTimer(&kernel_timer, visc_TimerID_NONE);
-  
-  pthread_mutex_unlock(&ocl_mtx);
-  return event;
-}
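-
-// Illustrative call sketch: launching a 2-D kernel over a 1024x1024 global
-// range with 16x16 work-groups (sizes are arbitrary; note that the local
-// size argument precedes the global one):
-#if 0 // example only, not compiled
-static void example_execute(void* graphID) {
-  size_t global[2] = {1024, 1024};
-  size_t local[2]  = {16, 16};
-  llvm_visc_ocl_executeNode(graphID, 2, local, global);
-}
-#endif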
-
-
-//////////////////////////////////////////////////////////////////////////////
-//! Loads a program binary/source file into memory.
-//!
-//! @return the file contents as a NUL-terminated string if succeeded, NULL otherwise
-//! @param Filename         program filename
-//! @param szFinalLength    returned length of the code string
-//////////////////////////////////////////////////////////////////////////////
-static char* LoadProgSource(const char* Filename, size_t* szFinalLength)
-{
-  DEBUG(cout << "Load Prog Source\n");
-  // locals
-  FILE* pFileStream = NULL;
-  size_t szSourceLength;
-
-  // open the OpenCL source code file
-  pFileStream = fopen(Filename, "rb");
-  if(pFileStream == 0)
-  {
-    return NULL;
-  }
-
-  // get the length of the source code
-  fseek(pFileStream, 0, SEEK_END);
-  szSourceLength = ftell(pFileStream);
-  fseek(pFileStream, 0, SEEK_SET);
-
-  // allocate a buffer for the source code string and read it in
-  char* cSourceString = (char *)malloc(szSourceLength + 1);
-  if (fread((cSourceString), szSourceLength, 1, pFileStream) != 1)
-  {
-      fclose(pFileStream);
-      free(cSourceString);
-      return 0;
-  }
-
-  // close the file and return the length of the source string
-  fclose(pFileStream);
-  if(szFinalLength != NULL)
-  {
-      *szFinalLength = szSourceLength;
-  }
-  cSourceString[szSourceLength] = '\0';
-
-  return cSourceString;
-}
-
-void* llvm_visc_ocl_launch(const char* FileName, const char* KernelName) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Launch OCL Kernel\n");
-  // Initialize OpenCL
-
-  // OpenCL specific variables
-  DFNodeContext_OCL *Context = (DFNodeContext_OCL *) malloc(sizeof(DFNodeContext_OCL));
-
-  size_t kernelLength;
-  cl_int errcode;
-
-  // For a single context for all kernels
-  Context->clOCLContext = globalOCLContext;
-
-  //Create a command-queue
-  //pthread_mutex_lock(&ocl_mtx);
-  Context->clCommandQue = clCreateCommandQueue(Context->clOCLContext, clDevices[0], CL_QUEUE_PROFILING_ENABLE, &errcode);
-  globalCommandQue = Context->clCommandQue;
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to create command queue");
-
-  DEBUG(cout << "Loading program binary: " << FileName << flush << "\n");
-  char *programSource = LoadProgSource(FileName, &kernelLength);
-  checkErr(programSource != NULL, 1 /*bool true*/, "Failure to load Program Binary");
-
-  cl_int binaryStatus;
-  //pthread_mutex_lock(&ocl_mtx);
-  Context->clProgram = clCreateProgramWithBinary(Context->clOCLContext, 1, &clDevices[0],
-                                        &kernelLength,
-                                        (const unsigned char **)&programSource,
-                                        &binaryStatus, &errcode);
-  //pthread_mutex_unlock(&ocl_mtx);
-  checkErr(errcode, CL_SUCCESS, "Failure to create program from binary");
-
-  DEBUG(cout << "Building kernel - " << KernelName << " from file " << FileName << flush << "\n");
-  errcode = clBuildProgram(Context->clProgram, 0, NULL, NULL, NULL, NULL);
-  // If build fails, get build log from device
-  if(errcode != CL_SUCCESS) {
-    cout << "ERROR: Failure to build program\n";
-    size_t len = 0;
-    errcode = clGetProgramBuildInfo(Context->clProgram, clDevices[0] , CL_PROGRAM_BUILD_LOG, 0,
-        NULL, &len);
-    cout << "LOG LENGTH: " << len << flush << "\n";
-    checkErr(errcode, CL_SUCCESS, "Failure to collect program build log length");
-    char *log = (char*) malloc(len*sizeof(char));
-    errcode = clGetProgramBuildInfo(Context->clProgram, clDevices[0], CL_PROGRAM_BUILD_LOG, len,
-        log, NULL);
-    checkErr(errcode, CL_SUCCESS, "Failure to collect program build log");
-
-    cout << "Device Build Log:\n" << log << flush << "\n";
-    free(log);
-    pthread_mutex_unlock(&ocl_mtx);
-    exit(EXIT_FAILURE);
-  }
-
-  Context->clKernel = clCreateKernel(Context->clProgram, KernelName, &errcode);
-  checkErr(errcode, CL_SUCCESS, "Failure to create kernel");
-
-  DEBUG(cout << "Kernel ID = " << Context->clKernel << "\n");
-  //free(clDevices);
-  free(programSource);
-
-  pthread_mutex_unlock(&ocl_mtx);
-  return Context;
-}
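-
-// End-to-end sketch of the OpenCL path: the file and kernel names are
-// hypothetical, error handling is elided, and the host input buffer must be
-// registered with the memory tracker before argument_ptr can locate it.
-#if 0 // example only, not compiled
-static void example_ocl_node(float* in, float* out, size_t bytes) {
-  llvm_visc_ocl_initContext(visc::GPU_TARGET);
-  void* graph = llvm_visc_ocl_launch("kernel.bin", "my_kernel");
-  llvm_visc_track_mem(in, bytes);                               // register host buffer
-  llvm_visc_ocl_argument_ptr(graph, in, 0, bytes, true, false); // arg 0: input
-  void* d_out = llvm_visc_ocl_output_ptr(graph, 1, bytes);      // arg 1: output
-  size_t global[1] = { bytes / sizeof(float) };
-  llvm_visc_ocl_executeNode(graph, 1, NULL, global);            // NULL local size is allowed
-  llvm_visc_ocl_wait(graph);
-  llvm_visc_ocl_getOutput(graph, out, d_out, bytes);
-  llvm_visc_untrack_mem(in);
-  llvm_visc_ocl_clearContext(graph);
-}
-#endif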
-
-
-void llvm_visc_ocl_wait(void* graphID) {
-  pthread_mutex_lock(&ocl_mtx);
-  DEBUG(cout << "Wait\n");
-  DFNodeContext_OCL *Context = (DFNodeContext_OCL*) graphID;
-  //pthread_mutex_lock(&ocl_mtx);
-  clFinish(Context->clCommandQue);
-  //pthread_mutex_unlock(&ocl_mtx);
-  pthread_mutex_unlock(&ocl_mtx);
-}
-
-void llvm_visc_switchToTimer(void** timerSet, enum visc_TimerID timer) {
-  //cout << "Switching to timer " << timer << flush << "\n";
-  pthread_mutex_lock(&ocl_mtx);
-  // NOTE: the actual timer switch is currently disabled.
-  //visc_SwitchToTimer((visc_TimerSet*)(*timerSet), timer);
-  pthread_mutex_unlock(&ocl_mtx);
-}
-void llvm_visc_printTimerSet(void** timerSet, char* timerName) {
-  pthread_mutex_lock(&ocl_mtx);
-  cout << "Printing VISC Timer: ";
-  if(timerName != NULL)
-    cout << timerName << flush << "\n";
-  else
-    cout << "Anonymous\n";
-  visc_PrintTimerSet((visc_TimerSet*) (*timerSet));
-  pthread_mutex_unlock(&ocl_mtx);
-}
-
-void* llvm_visc_initializeTimerSet() {
-  pthread_mutex_lock(&ocl_mtx);
-  visc_TimerSet* TS = (visc_TimerSet*) malloc (sizeof(visc_TimerSet));
-  visc_InitializeTimerSet(TS);
-  pthread_mutex_unlock(&ocl_mtx);
-  return TS;
-}
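-
-// Illustrative sketch for the timer wrapper trio above (the handle is kept
-// as void* by generated code; note that switchToTimer is currently a no-op):
-#if 0 // example only, not compiled
-static void example_timer_wrappers() {
-  void* ts = llvm_visc_initializeTimerSet();
-  llvm_visc_switchToTimer(&ts, visc_TimerID_COMPUTATION);
-  /* ... workload ... */
-  llvm_visc_switchToTimer(&ts, visc_TimerID_NONE);
-  llvm_visc_printTimerSet(&ts, (char*)"ExampleTimer");
-}
-#endif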
-
-
-
diff --git a/llvm/projects/visc-cpu-rt/visc-rt.h b/llvm/projects/visc-cpu-rt/visc-rt.h
deleted file mode 100644
index 5e7546f582f09173d5da4f4129218275873e17bb..0000000000000000000000000000000000000000
--- a/llvm/projects/visc-cpu-rt/visc-rt.h
+++ /dev/null
@@ -1,302 +0,0 @@
-/*
- *
- * (c) 2010 The Board of Trustees of the University of Illinois.
- */
-#ifndef VISC_RT_HEADER
-#define VISC_RT_HEADER
-
-#include <iostream>
-#include <map>
-#include <ctime>
-#include <vector>
-#include <pthread.h>
-#include <string>
-//#include <condition_variable>
-
-#include "llvm/SupportVISC/VISCHint.h"
-#include "llvm/SupportVISC/VISCTimer.h"
-#include "device_abstraction.h"
-#include "policy.h"
-#ifndef DEBUG_BUILD
-#define DEBUG(s) {}
-#else
-#define DEBUG(s) s
-#endif
-
-
-
-using namespace std;
-
-extern "C" {
-
-/************************* Policies *************************************/
-
-void llvm_visc_policy_init();
-void llvm_visc_policy_clear();
-int llvm_visc_policy_getVersion(const char *, int64_t);
-
-/******************** Device Abstraction ********************************/
-void llvm_visc_deviceAbstraction_start();
-void llvm_visc_deviceAbstraction_end();
-void llvm_visc_deviceAbstraction_waitOnDeviceStatus();
-
-/********************* DFG Depth Stack **********************************/
-class DFGDepth {
-  private:
-    unsigned numDim;
-    unsigned dimLimit[3];
-    unsigned dimInstance[3];
-  public:
-    DFGDepth() {}
-    DFGDepth(unsigned n, unsigned dimX = 0, unsigned iX = 0, unsigned dimY = 0, unsigned iY = 0,
-        unsigned dimZ = 0, unsigned iZ = 0) {
-      assert(n <= 3 && "Error! More than 3 dimensions not supported");
-      numDim = n;
-      dimLimit[0] = dimX;
-      dimLimit[1] = dimY;
-      dimLimit[2] = dimZ;
-      dimInstance[0] = iX;
-      dimInstance[1] = iY;
-      dimInstance[2] = iZ;
-    }
-    unsigned getDimLimit(unsigned dim) {
-      assert(dim < numDim && "Error! Requested dimension limit is not specified");
-      return dimLimit[dim];
-    }
-
-    unsigned getDimInstance(unsigned dim) {
-      assert(dim < numDim && "Error! Requested dimension instance is not specified");
-      return dimInstance[dim];
-    }
-
-    unsigned getNumDim() {
-      return numDim;
-    }
-};
-
-void llvm_visc_x86_dstack_push(unsigned n, uint64_t limitX = 0, uint64_t iX = 0,
-    uint64_t limitY = 0, uint64_t iY = 0, uint64_t limitZ = 0, uint64_t iZ = 0);
-void llvm_visc_x86_dstack_pop();
-uint64_t llvm_visc_x86_getDimLimit(unsigned level, unsigned dim);
-uint64_t llvm_visc_x86_getDimInstance(unsigned level, unsigned dim);
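-
-// Illustrative sketch (hypothetical values): generated x86 code pushes one
-// dimension-stack frame per graph level before running a leaf node and pops
-// it afterwards; the level argument of the getters is assumed to count from
-// the node's own frame.
-#if 0 // example only, not compiled
-static void example_dstack() {
-  llvm_visc_x86_dstack_push(2, /*limitX=*/64, /*iX=*/3, /*limitY=*/32, /*iY=*/7);
-  uint64_t limX  = llvm_visc_x86_getDimLimit(0, 0);    // 64
-  uint64_t instY = llvm_visc_x86_getDimInstance(0, 1); // 7
-  llvm_visc_x86_dstack_pop();
-}
-#endif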
-
-
-/********************* Memory Tracker **********************************/
-class MemTrackerEntry {
-public:
-  enum Location {HOST, DEVICE};
-  private:
-    size_t size;
-    Location loc;
-    void* addr;
-    void* Context;
-
-  public:
-    MemTrackerEntry(size_t _size, Location _loc, void* _addr, void* _Context):
-      size(_size), loc(_loc), addr(_addr), Context(_Context) {
-    }
-
-    size_t getSize() {
-      return size;
-    }
-
-    Location getLocation() {
-      return loc;
-    }
-
-    void* getAddress() {
-      return addr;
-    }
-
-    void* getContext() {
-      return Context;
-    }
-
-    void update(Location _loc, void* _addr, void* _Context = NULL) {
-      loc = _loc;
-      addr = _addr;
-      Context = _Context;
-    }
-
-    void print() {
-      cout << "Size = " << size << "\tLocation = " << loc << "\tAddress = " << addr << "\tContext = " << Context;
-    }
-};
-
-
-class MemTracker {
-
-private:
-  std::map<void*, MemTrackerEntry*> Table;
-
-public:
-  MemTracker() {
-  }
-
-  bool insert(void* ID, size_t size, MemTrackerEntry::Location loc, void* addr, void* Context = NULL) {
-    MemTrackerEntry* MTE = new MemTrackerEntry(size, loc, addr, Context);
-    Table.insert(std::pair<void*, MemTrackerEntry*>(ID, MTE));
-    return MTE != NULL;
-  }
-
-  MemTrackerEntry* lookup(void* ID) {
-    if(Table.count(ID) == 0)
-      return NULL;
-    return Table[ID];
-  }
-
-  void remove(void* ID) {
-    MemTrackerEntry* MTE = Table[ID];
-    delete MTE; // allocated with new in insert(), so delete, not free
-    Table.erase(ID);
-  }
-
-  void print() {
-    cout << "Printing Table ... Size = " << Table.size() << flush << "\n";
-    for(auto& Entry: Table) {
-      cout << Entry.first << ":\t" ;
-      Entry.second->print();
-      cout << flush << "\n";
-    }
-  }
-
-};
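-
-// Illustrative bookkeeping sketch for MemTracker (pointers and sizes are
-// hypothetical):
-#if 0 // example only, not compiled
-static void example_mem_tracker() {
-  MemTracker MT;
-  char host[256];
-  MT.insert(host, sizeof(host), MemTrackerEntry::HOST, host); // host-resident
-  if (MemTrackerEntry* E = MT.lookup(host))
-    E->update(MemTrackerEntry::DEVICE, NULL);                 // moved to device
-  MT.remove(host);
-}
-#endif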
-
-void llvm_visc_track_mem(void*, size_t);
-void llvm_visc_untrack_mem(void*);
-void* llvm_visc_request_mem(void*, size_t);
-
-/*********************** OPENCL & PTHREAD API **************************/
-void* llvm_visc_x86_launch(void* (void*), void*);
-void llvm_visc_x86_wait(void*);
-void* llvm_visc_ocl_initContext(enum visc::Target);
-
-void* llvm_visc_x86_argument_ptr(void*, size_t);
-
-void llvm_visc_ocl_clearContext(void*);
-void llvm_visc_ocl_argument_shared(void*, int, size_t);
-void llvm_visc_ocl_argument_scalar(void*, void*, int, size_t);
-void* llvm_visc_ocl_argument_ptr(void*, void*, int, size_t, bool, bool);
-void* llvm_visc_ocl_output_ptr(void*, int, size_t);
-void llvm_visc_ocl_free(void*);
-void* llvm_visc_ocl_getOutput(void*, void*, void*, size_t);
-void* llvm_visc_ocl_executeNode(void*, unsigned, const size_t*, const size_t*);
-void* llvm_visc_ocl_launch(const char*, const char*);
-void llvm_visc_ocl_wait(void*);
-
-void llvm_visc_switchToTimer(void** timerSet, enum visc_TimerID);
-void llvm_visc_printTimerSet(void** timerSet, char* timerName = NULL);
-void* llvm_visc_initializeTimerSet();
-
-}
-
-/*************************** Pipeline API ******************************/
-// Circular Buffer class
-// Counter used to assign unique buffer IDs. Declared static so that including
-// this header in multiple translation units does not break the one-definition rule.
-static unsigned counter = 0;
-template <class ElementType>
-class CircularBuffer {
-private:
-    int numElements;
-    int bufferSize;
-    int Head;
-    int Tail;
-    pthread_mutex_t mtx;
-    pthread_cond_t cv;
-    vector<ElementType> buffer;
-    std::string name;
-    unsigned ID;
-
-public:
-    CircularBuffer(int maxElements, std::string _name =  "ANON") {
-        ID = counter;
-        Head = 0;
-        Tail = 0;
-        numElements = 0;
-        name = _name;
-        bufferSize = maxElements+1;
-        buffer.resize(bufferSize); // resize (not reserve): push/pop index elements directly
-        pthread_mutex_init(&mtx, NULL);
-        pthread_cond_init(&cv, NULL);
-        counter++;
-
-    }
-
-    bool push(ElementType E);
-    ElementType pop();
-
-};
-
-template <class ElementType>
-bool CircularBuffer<ElementType>::push(ElementType E) {
-    //DEBUG(cout << name << " Buffer[" << ID << "]: Push " << E << flush << "\n");
-    //unique_lock<mutex> lk(mtx);
-    pthread_mutex_lock(&mtx);
-    // Wait in a loop: pthread_cond_wait may wake spuriously, and another
-    // thread may have changed the buffer before the mutex is re-acquired.
-    while((Head + 1) % bufferSize == Tail) {
-        //DEBUG(cout << name << " Buffer[" << ID << "]: Push going to sleep ...\n");
-        pthread_cond_wait(&cv, &mtx);
-        //DEBUG(cout << name << " Buffer[" << ID << "]: Push woke up\n");
-    }
-    buffer[Head] = E;
-    Head = (Head+1) % bufferSize;
-    numElements++;
-    //DEBUG(cout << name << " Buffer[" << ID << "]: Total Elements = " << numElements << flush << "\n");
-    //lk.unlock();
-    pthread_mutex_unlock(&mtx);
-    //cv.notify_one();
-    pthread_cond_signal(&cv);
-    return true;
-}
-
-template <class ElementType>
-ElementType CircularBuffer<ElementType>::pop() {
-    //unique_lock<mutex> lk(mtx);
-    //DEBUG(cout << name << " Buffer[" << ID << "]: Pop\n");
-    pthread_mutex_lock(&mtx);
-    // Wait in a loop (see push() above for why `while` rather than `if`).
-    while(Tail == Head) {
-        //DEBUG(cout << name << " Buffer[" << ID << "]: Pop going to sleep ...\n");
-        pthread_cond_wait(&cv, &mtx);
-        //DEBUG(cout << name << " Buffer[" << ID << "]: Pop woke up\n");
-    }
-    ElementType E = buffer[Tail];
-    Tail = (Tail + 1) % bufferSize;
-    numElements--;
-    //DEBUG(cout << name << " Buffer[" << ID << "]: Total Elements = " << numElements << flush << "\n");
-    //lk.unlock();
-    pthread_mutex_unlock(&mtx);
-    //cv.notify_one();
-    pthread_cond_signal(&cv);
-    return E;
-}
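-
-// Illustrative producer/consumer sketch for CircularBuffer: push blocks when
-// the buffer is full, pop blocks when it is empty. The thread bodies are
-// hypothetical.
-#if 0 // example only, not compiled
-static void* example_producer(void* b) {
-  CircularBuffer<uint64_t>* buf = (CircularBuffer<uint64_t>*) b;
-  for (uint64_t i = 0; i < 10; i++)
-    buf->push(i);
-  return NULL;
-}
-static void* example_consumer(void* b) {
-  CircularBuffer<uint64_t>* buf = (CircularBuffer<uint64_t>*) b;
-  for (int i = 0; i < 10; i++)
-    (void) buf->pop();
-  return NULL;
-}
-#endif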
-
-extern "C" {
-// Functions to push and pop values from pipeline buffers
-uint64_t llvm_visc_bufferPop(void*);
-void llvm_visc_bufferPush(void*, uint64_t);
-
-// Functions to create and destroy buffers
-void* llvm_visc_createBindInBuffer(void*, uint64_t, unsigned);
-void* llvm_visc_createBindOutBuffer(void*, uint64_t);
-void* llvm_visc_createEdgeBuffer(void*, uint64_t);
-void* llvm_visc_createLastInputBuffer(void*, uint64_t);
-
-void llvm_visc_freeBuffers(void*);
-
-// Functions to create and destroy threads
-void llvm_visc_createThread(void* graphID, void*(*Func)(void*), void*);
-void llvm_visc_freeThreads(void*);
-
-// Launch API for a streaming graph.
-// Arguments:
-// (1) Launch Function: void* (void*, void*)
-// (2) Push Function:   void (void*, std::vector<uint64_t>**, unsigned)
-// (3) Pop Function:    void* (std::vector<uint64_t>**, unsigned)
-void* llvm_visc_streamLaunch(void(*LaunchFunc)(void*, void*), void*);
-void llvm_visc_streamPush(void* graphID, void* args);
-void* llvm_visc_streamPop(void* graphID);
-void llvm_visc_streamWait(void* graphID);
-
-}
-
-#endif //VISC_RT_HEADER