Commit f9d15930 authored by nz11

Update resnet50_imagenet.py

parent e5dda4e9
@@ -11,12 +11,12 @@ import tensorflow as tf
 import keras
 from keras.models import Sequential, Model
 from keras.layers import *
-from keras.applications.resnet50 import ResNet50, preprocess_input
 from keras.utils import to_categorical
+from keras.applications.resnet50 import ResNet50, preprocess_input
 from keras import backend as K
 from frontend.approxhpvm_translator import translate_to_approxhpvm
-from frontend.weight_utils import dumpCalibrationData
+from frontend.weight_utils import dumpCalibrationData2
 np.random.seed(2020)
@@ -29,29 +29,13 @@ data_format = 'channels_first'
 IMAGENET_DIR = '/home/nz11/ILSVRC2012/'
-OUTPUT_DIR = 'data/resnet50_imagenet/'
+OUTPUT_DIR = 'data/resnet50_imagenet_tune/'
+WEIGHTS_PATH = 'data/resnet50_imagenet/weights.h5'
-NUM_CLASSES = 100
-IMAGES_PER_CLASS = 200
-VAL_SIZE = 100
+NUM_CLASSES = 200
+IMAGES_PER_CLASS = 40
+# VAL_SIZE = 100
-# def get_resnet50_nchw_keras():
-#     model = ResNet50()
-#     for x in model.layers:
-#         print (x.name)
-#     x = model.get_layer('flatten_1').output
-#     x = Dense(1000, name='fc1000')(x)
-#     x = Activation('softmax')(x)
-#     model_nchw = Model(model.input, x)
-#     model_nchw.get_layer('fc1000').set_weights(model.get_layer('fc1000').get_weights())
-#     return model_nchw
 def identity_block(input_tensor, kernel_size, filters, stage, block):
@@ -205,8 +189,8 @@ f.close()
 model = get_resnet50_nchw_keras()
-X_test = []
-y_true = []
+X_tune, X_test = [], []
+y_tune, y_true = [], []
 classes = glob.glob(IMAGENET_DIR + 'val/*')
@@ -217,13 +201,21 @@ for c in np.random.permutation(len(classes))[:NUM_CLASSES]:
     idx = np.random.permutation(len(x))
     idx = idx[:max(len(idx), IMAGES_PER_CLASS)]
-    X_test += list(map(lambda x : load_image(x), x[idx]))
     synset = classes[c].split('/')[-1]
-    y_true += [synset_to_keras_idx[synset]] * len(x[idx])
+    images = list(map(lambda x : load_image(x), x[idx]))
+    labels = [synset_to_keras_idx[synset]] * len(x[idx])
+    X_test += images[:IMAGES_PER_CLASS // 2]
+    y_true += labels[:IMAGES_PER_CLASS // 2]
+    X_tune += images[IMAGES_PER_CLASS // 2:]
+    y_tune += labels[IMAGES_PER_CLASS // 2:]
 X_test = np.array(X_test)
 y_true = np.array(y_true)
+X_tune = np.array(X_tune)
+y_tune = np.array(y_tune)
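A reading aid for the hunk above: each sampled class is split in half, the first half extending the test set and the second half the new tuning set. A hypothetical helper capturing the same logic (split_class_images is not in the script; load_image and synset_to_keras_idx are):

import numpy as np

# Hypothetical sketch of the per-class split added in this hunk; load_image and
# synset_to_keras_idx are defined elsewhere in resnet50_imagenet.py.
def split_class_images(paths, keras_idx, images_per_class):
    paths = np.asarray(paths)
    idx = np.random.permutation(len(paths))[:images_per_class]
    images = [load_image(p) for p in paths[idx]]
    labels = [keras_idx] * len(idx)
    half = images_per_class // 2
    # first half of each class feeds the test split, the second half the tune split
    return (images[:half], labels[:half]), (images[half:], labels[half:])

Note that the unchanged context line idx = idx[:max(len(idx), IMAGES_PER_CLASS)] keeps every image of the class, since max never shrinks the slice; the sketch assumes the intent is to cap at IMAGES_PER_CLASS per class.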
@@ -247,7 +239,6 @@ def train_helper(x):
 train_images = glob.glob(IMAGENET_DIR + 'train/*/*')
 random.shuffle(train_images)
...@@ -258,7 +249,7 @@ dataset = dataset.map( ...@@ -258,7 +249,7 @@ dataset = dataset.map(
) )
dataset = dataset.shuffle(buffer_size=1000) dataset = dataset.shuffle(buffer_size=1000)
dataset = dataset.batch(32) dataset = dataset.batch(64)
dataset = dataset.repeat() dataset = dataset.repeat()
next_element = dataset.make_one_shot_iterator().get_next() next_element = dataset.make_one_shot_iterator().get_next()
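The body of generate() lies outside this hunk; a minimal, hypothetical sketch (not the file's actual implementation) of how a TF1 one-shot iterator such as next_element is typically drained and fed to fit_generator:

from keras import backend as K
from keras.utils import to_categorical

# Hypothetical sketch: pull preprocessed 64-image batches from the tf.data pipeline
# above and yield (images, one-hot labels) pairs for model.fit_generator.
def generate():
    sess = K.get_session()
    while True:
        x, y = sess.run(next_element)
        yield x, to_categorical(y, num_classes=1000)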
@@ -273,15 +264,22 @@ def generate():
 model.compile(optimizer=keras.optimizers.Adam(lr=0.00001), loss='categorical_crossentropy', metrics=['acc'])
-model.fit_generator(generate(), steps_per_epoch=1000, validation_data=(X_test, to_categorical(y_true, num_classes=1000)), epochs=6)
-translate_to_approxhpvm(model, OUTPUT_DIR, X_test[:VAL_SIZE], y_true[:VAL_SIZE], 1000)
-dumpCalibrationData(OUTPUT_DIR + 'test_input.bin', X_test, OUTPUT_DIR + 'test_labels.bin', y_true)
-# pred = np.argmax(model.predict(X_test), axis=1)
-# print ('val accuracy', np.sum(pred == y_true.ravel()) / len(X_test))
+if os.path.exists(WEIGHTS_PATH):
+    model.load_weights(WEIGHTS_PATH)
+else:
+    model.fit_generator(generate(), steps_per_epoch=1000, validation_data=(X_test, to_categorical(y_true, num_classes=1000)), epochs=6)
+    model.save_weights(OUTPUT_DIR + 'weights.h5')
+translate_to_approxhpvm(model, OUTPUT_DIR, X_tune, y_tune, 1000)
+# dumpCalibrationData2(OUTPUT_DIR + 'test_input_10K.bin', X_test, OUTPUT_DIR + 'test_labels_10K.bin', y_true)
+dumpCalibrationData2(OUTPUT_DIR + 'tune_input.bin', X_tune, OUTPUT_DIR + 'tune_labels.bin', y_tune)
+dumpCalibrationData2(OUTPUT_DIR + 'test_input.bin', X_test, OUTPUT_DIR + 'test_labels.bin', y_true)
+pred = np.argmax(model.predict(X_test), axis=1)
+print ('val accuracy', np.sum(pred == y_true.ravel()) / len(X_test))
+pred = np.argmax(model.predict(X_tune), axis=1)
+print ('val accuracy', np.sum(pred == y_tune.ravel()) / len(X_tune))
\ No newline at end of file
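The two accuracy prints added at the end compute top-1 accuracy on the test and tune splits; an equivalent hypothetical helper, shown only to make the metric explicit:

import numpy as np

# Hypothetical helper equivalent to the accuracy checks above.
def top1_accuracy(model, X, y):
    pred = np.argmax(model.predict(X), axis=1)
    return np.sum(pred == y.ravel()) / len(X)

# Usage mirroring the script:
# print('val accuracy', top1_accuracy(model, X_test, y_true))
# print('val accuracy', top1_accuracy(model, X_tune, y_tune))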