|
@@ -6,7 +6,8 @@
|
|
|
'''
|
|
|
|
|
|
import scipy.misc as misc
|
|
|
-import cPickle as pickle
|
|
|
+# import cPickle as pickle
|
|
|
+import _pickle as pickle
|
|
|
import tensorflow as tf
|
|
|
from tqdm import tqdm
|
|
|
import numpy as np
|
|
@@ -24,10 +25,8 @@ slim = tf.contrib.slim
|
|
|
Recursively obtains all images in the directory specified
|
|
|
|
|
|
'''
|
|
|
-def getPaths(data_dir):
|
|
|
-
|
|
|
+def getPaths(data_dir):
|
|
|
image_paths = []
|
|
|
-
|
|
|
# add more extensions if need be
|
|
|
ps = ['jpg', 'jpeg', 'JPG', 'JPEG', 'bmp', 'BMP', 'png', 'PNG']
|
|
|
for p in ps:
|
|
@@ -50,6 +49,8 @@ if __name__ == '__main__':
|
|
|
model = a.model
|
|
|
checkpoint_file = a.checkpoint_file
|
|
|
|
|
|
+ print( data_dir, model, checkpoint_file )
|
|
|
+
|
|
|
# I only have these because I thought some take in size of (299,299), but maybe not
|
|
|
if 'inception' in model: height, width, channels = 224, 224, 3
|
|
|
if 'resnet' in model: height, width, channels = 224, 224, 3
|
|
@@ -109,13 +110,14 @@ if __name__ == '__main__':
|
|
|
logits, end_points = vgg_19(x, is_training=False)
|
|
|
features = end_points['vgg_19/fc8']
|
|
|
|
|
|
+ print('init features...')
|
|
|
sess = tf.Session()
|
|
|
saver = tf.train.Saver()
|
|
|
saver.restore(sess, checkpoint_file)
|
|
|
|
|
|
feat_dict = {}
|
|
|
paths = getPaths(data_dir)
|
|
|
- print 'Computing features...'
|
|
|
+ print('Computing features...')
|
|
|
for path in tqdm(paths):
|
|
|
image = misc.imread(path)
|
|
|
image = misc.imresize(image, (height, width))
|