From cd37699cb7fdcf6cf4a37a75a871261d45690bfb Mon Sep 17 00:00:00 2001
From: Abdul Rafae Noor <arnoor2@tyler.cs.illinois.edu>
Date: Wed, 3 Feb 2021 11:30:57 -0600
Subject: [PATCH] Updating Keras frontend examples to use model_params data
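
Each Keras benchmark now loads its test/tune split from the pre-generated
binaries under MODEL_PARAMS_DIR instead of slicing the CIFAR validation set.
A minimal sketch of the shared loading pattern (the alexnet_cifar10 paths are
one example taken from the diff below; MODEL_PARAMS_DIR is the benchmarks'
existing model-parameter directory constant):

    import numpy as np

    # Flat float32 image data, reshaped to NCHW as the models expect.
    X_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/test_input.bin',
                         dtype=np.float32).reshape((-1, 3, 32, 32))
    # Labels are stored as raw uint32 class indices.
    y_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/test_labels.bin',
                         dtype=np.uint32)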

---
 hpvm/projects/keras/src/alexnet.py           | 21 ++++++++++-----
 hpvm/projects/keras/src/alexnet2.py          | 22 ++++++++++------
 hpvm/projects/keras/src/mobilenet_cifar10.py | 20 ++++++++++-----
 hpvm/projects/keras/src/resnet18_cifar10.py  | 20 +++++----------
 hpvm/projects/keras/src/vgg16_cifar10.py     | 24 ++++++++++-------
 hpvm/projects/keras/src/vgg16_cifar100.py    | 27 ++++++++++++--------
 6 files changed, 78 insertions(+), 56 deletions(-)

diff --git a/hpvm/projects/keras/src/alexnet.py b/hpvm/projects/keras/src/alexnet.py
index 4b23fd995f..d611fc3f80 100644
--- a/hpvm/projects/keras/src/alexnet.py
+++ b/hpvm/projects/keras/src/alexnet.py
@@ -69,18 +69,25 @@ class AlexNet_CIFAR10(Benchmark):
         mean = np.mean(X_train)
         std = np.std(X_train)
         X_train = (X_train - mean) / (std + 1e-7)
-        X_val = (X_val - mean) / (std + 1e-7)  
+        X_val = (X_val - mean) / (std + 1e-7)
 
-        X_test = X_val[0:5000]
-        y_test = y_val[0:5000]
-        X_tuner = X_val[5000:]
-        y_tuner = y_val[5000:]
+
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/test_labels.bin', dtype=np.uint32)
+
+        X_test = X_test.reshape((-1,3,32,32))
+
+
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/alexnet_cifar10/tune_labels.bin', dtype=np.uint32)
+
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
-    
+
 
     def trainModel(self, model, X_train, y_train, X_test, y_test):
-        
+
         y_train = to_categorical(y_train, self.num_classes)
         y_test = to_categorical(y_test, self.num_classes)
 
diff --git a/hpvm/projects/keras/src/alexnet2.py b/hpvm/projects/keras/src/alexnet2.py
index de69d8c129..9c6c9ec621 100644
--- a/hpvm/projects/keras/src/alexnet2.py
+++ b/hpvm/projects/keras/src/alexnet2.py
@@ -68,21 +68,27 @@ class AlexNet2_CIFAR10(Benchmark):
         mean = np.mean(X_train)
         std = np.std(X_train)
         X_train = (X_train - mean) / (std + 1e-7)
-        X_val = (X_val - mean) / (std + 1e-7)  
+        X_val = (X_val - mean) / (std + 1e-7)
 
-        X_test = X_val[0:5000]
-        y_test = y_val[0:5000]
-        X_tuner = X_val[5000:]
-        y_tuner = y_val[5000:]
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet2_cifar10/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/alexnet2_cifar10/test_labels.bin', dtype=np.uint32)
+
+        X_test = X_test.reshape((-1,3,32,32))
+
+
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/alexnet2_cifar10/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/alexnet2_cifar10/tune_labels.bin', dtype=np.uint32)
+
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
-    
+
 
     def trainModel(self, model, X_train, y_train, X_test, y_test):
-                
+
         y_train = to_categorical(y_train, self.num_classes)
         y_test = to_categorical(y_test, self.num_classes)
-        
+
         model.compile(
             loss='categorical_crossentropy',
             optimizer=Adam(lr=0.0001),
diff --git a/hpvm/projects/keras/src/mobilenet_cifar10.py b/hpvm/projects/keras/src/mobilenet_cifar10.py
index 367a4dfc62..c1ea50c109 100644
--- a/hpvm/projects/keras/src/mobilenet_cifar10.py
+++ b/hpvm/projects/keras/src/mobilenet_cifar10.py
@@ -110,21 +110,27 @@ class MobileNet_CIFAR10(Benchmark):
         mean = np.mean(X_train)
         std = np.std(X_train)
         X_train = (X_train - mean) / (std + 1e-7)
-        X_val = (X_val - mean) / (std + 1e-7)  
+        X_val = (X_val - mean) / (std + 1e-7)
+
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/mobilenet_cifar10/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/mobilenet_cifar10/test_labels.bin', dtype=np.uint32)
+
+        X_test = X_test.reshape((-1,3,32,32))
+
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/mobilenet_cifar10/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/mobilenet_cifar10/tune_labels.bin', dtype=np.uint32)
+
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
-        X_test = X_val[0:5000]
-        y_test = y_val[0:5000]
-        X_tuner = X_val[5000:]
-        y_tuner = y_val[5000:]
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
-    
+
 
     def trainModel(self, model, X_train, y_train, X_test, y_test):
 
         y_train = to_categorical(y_train, self.num_classes)
         y_test = to_categorical(y_test, self.num_classes)
-        
+
         # data augmentation, horizontal flips only
         datagen = ImageDataGenerator(
                 featurewise_center=False,
diff --git a/hpvm/projects/keras/src/resnet18_cifar10.py b/hpvm/projects/keras/src/resnet18_cifar10.py
index 74abc7ad9f..266f00dfa9 100644
--- a/hpvm/projects/keras/src/resnet18_cifar10.py
+++ b/hpvm/projects/keras/src/resnet18_cifar10.py
@@ -448,24 +448,16 @@ class ResNet18_CIFAR10(Benchmark):
         X_val = (X_val - mean)
 
 
-        X_test_val = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/test_input.bin', dtype=np.float32)
-        Y_test_val = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/test_labels.bin', dtype=np.uint32)
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/test_labels.bin', dtype=np.uint32)
 
-        X_test_val = X_test_val.reshape((-1,3,32,32))
+        X_test = X_test.reshape((-1,3,32,32))
 
 
-        X_tune_val = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/tune_input.bin', dtype=np.float32)
-        Y_tune_val = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/tune_labels.bin', dtype=np.uint32)
-
-        X_tune_val = X_tune_val.reshape((-1,3,32,32))
-
-
-        X_test = X_test_val[:5000]
-        y_test= Y_test_val[:5000]
-
-        X_tuner = X_tune_val[:5000]
-        y_tuner = Y_tune_val[:5000]
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/resnet18_cifar10/tune_labels.bin', dtype=np.uint32)
 
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
 
diff --git a/hpvm/projects/keras/src/vgg16_cifar10.py b/hpvm/projects/keras/src/vgg16_cifar10.py
index 873e23b766..3870aa6dc9 100644
--- a/hpvm/projects/keras/src/vgg16_cifar10.py
+++ b/hpvm/projects/keras/src/vgg16_cifar10.py
@@ -108,29 +108,35 @@ class VGG16_CIFAR10(Benchmark):
         mean = np.mean(X_train)
         std = np.std(X_train)
         X_train = (X_train - mean) / (std + 1e-7)
-        X_val = (X_val - mean) / (std + 1e-7)  
+        X_val = (X_val - mean) / (std + 1e-7)
+
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar10/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar10/test_labels.bin', dtype=np.uint32)
+
+        X_test = X_test.reshape((-1,3,32,32))
+
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar10/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar10/tune_labels.bin', dtype=np.uint32)
+
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
-        X_test = X_val[0:5000]
-        y_test = y_val[0:5000]
-        X_tuner = X_val[5000:]
-        y_tuner = y_val[5000:]
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
 
-    
+
     def trainModel(self, model, X_train, y_train, X_test, y_test):
 
         y_train = to_categorical(y_train, self.num_classes)
         y_test = to_categorical(y_test, self.num_classes)
-        
+
         batch_size = 128
         learning_rate = 0.01
         lr_drop = 20
 
-        
+
         def lr_scheduler(epoch):
             return learning_rate * (0.5 ** (epoch // lr_drop))
-        
+
         reduce_lr = keras.callbacks.LearningRateScheduler(lr_scheduler)
 
         #data augmentation
diff --git a/hpvm/projects/keras/src/vgg16_cifar100.py b/hpvm/projects/keras/src/vgg16_cifar100.py
index 03bb852e00..d605e05bd9 100644
--- a/hpvm/projects/keras/src/vgg16_cifar100.py
+++ b/hpvm/projects/keras/src/vgg16_cifar100.py
@@ -124,29 +124,34 @@ class VGG16_CIFAR100(Benchmark):
         mean = np.mean(X_train)
         std = np.std(X_train)
         X_train = (X_train - mean) / (std + 1e-7)
-        X_val = (X_val - mean) / (std + 1e-7)  
+        X_val = (X_val - mean) / (std + 1e-7)
 
-        X_test = X_val[0:5000]
-        y_test = y_val[0:5000]
-        X_tuner = X_val[5000:]
-        y_tuner = y_val[5000:]
+        X_test = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar100/test_input.bin', dtype=np.float32)
+        y_test = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar100/test_labels.bin', dtype=np.uint32)
+
+        X_test = X_test.reshape((-1,3,32,32))
+
+        X_tuner = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar100/tune_input.bin', dtype=np.float32)
+        y_tuner = np.fromfile(MODEL_PARAMS_DIR + '/vgg16_cifar100/tune_labels.bin', dtype=np.uint32)
+
+        X_tuner = X_tuner.reshape((-1,3,32,32))
 
         return X_train, y_train, X_test, y_test, X_tuner, y_tuner
-    
-    
+
+
     def trainModel(self,model, X_train, y_train, X_test, y_test):
 
         y_train = to_categorical(y_train, self.num_classes)
         y_test = to_categorical(y_test, self.num_classes)
-        
+
         batch_size = 128
         learning_rate = 0.1
         lr_drop = 30
-        
- 
+
+
         def lr_scheduler(epoch):
             return learning_rate * (0.5 ** (epoch // lr_drop))
-        
+
         reduce_lr = keras.callbacks.LearningRateScheduler(lr_scheduler)
 
         #data augmentation
-- 
GitLab