[4fa73e]: /tensorflow/lib/utils.py

import os

import numpy as np
import tensorflow as tf

# Global command-line flags (TensorFlow 1.x tf.app.flags API)
F = tf.app.flags.FLAGS
"""
Save tensorflow model
Parameters:
* checkpoint_dir - name of the directory where model is to be saved
* sess - current tensorflow session
* saver - tensorflow saver
"""
def save_model(checkpoint_dir, sess, saver):
model_name = "model.ckpt"
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
saver.save(sess, os.path.join(checkpoint_dir, model_name))
"""
Load tensorflow model
Parameters:
* checkpoint_dir - name of the directory where model is to be loaded from
* sess - current tensorflow session
* saver - tensorflow saver
Returns: True if the model loaded successfully, else False
"""
def load_model(checkpoint_dir, sess, saver):
print(" [*] Reading checkpoints...")
ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
saver.restore(sess, os.path.join(checkpoint_dir, ckpt_name))
return True
else:
return False
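# Illustrative usage sketch, not part of the original module; the checkpoint
# directory name and the surrounding training loop below are hypothetical.
#
#   saver = tf.train.Saver()
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       if not load_model("checkpoint", sess, saver):
#           print(" [!] No checkpoint found, starting from scratch")
#       # ... run training steps ...
#       save_model("checkpoint", sess, saver)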
def compute_weighted_fm_loss(intermediate_layers):
    """Weighted feature-matching loss between the mean activations of the
    unlabelled (real) and fake batches at each intermediate layer.

    Parameters:
    * intermediate_layers - pair (layers_unlab, layers_fake) of lists of tensors
    Returns: (loss, weights)
    """
    intermediate_layers_unlab, intermediate_layers_fake = intermediate_layers
    layers_distance = []
    for i in range(len(intermediate_layers_unlab)):
        # L1 distance between the batch-mean activations of the two streams
        normalized_dist = tf.reduce_mean(tf.abs(tf.reduce_mean(intermediate_layers_unlab[i], 0)
                                                - tf.reduce_mean(intermediate_layers_fake[i], 0)))
        layers_distance.append(normalized_dist)
    layers_distance = tf.stack(layers_distance)
    total_distance = tf.reduce_sum(layers_distance)
    # Softmax over negative log relative distances, so layers whose statistics
    # already match closely receive a higher weight
    weights = tf.nn.softmax(-tf.log(layers_distance / total_distance))
    loss = tf.reduce_sum(tf.multiply(layers_distance, weights))
    return loss, weights
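# Illustrative sketch, not part of the original module; the discriminator
# activation tensors and generator variables below are hypothetical.
#
#   unlab_layers = [h1_unlab, h2_unlab]  # activations on an unlabelled batch
#   fake_layers = [h1_fake, h2_fake]     # activations on a generated batch
#   fm_loss, fm_weights = compute_weighted_fm_loss((unlab_layers, fake_layers))
#   g_optim = tf.train.AdamOptimizer(1e-4).minimize(fm_loss, var_list=g_vars)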
"""
To recompose an array of 3D images from patches
"""
def recompose3D_overlap(preds, img_h, img_w, img_d, stride_h, stride_w, stride_d):
patch_h = preds.shape[1]
patch_w = preds.shape[2]
patch_d = preds.shape[3]
N_patches_h = (img_h-patch_h)//stride_h+1
N_patches_w = (img_w-patch_w)//stride_w+1
N_patches_d = (img_d-patch_d)//stride_d+1
N_patches_img = N_patches_h * N_patches_w * N_patches_d
print("N_patches_h: " ,N_patches_h)
print("N_patches_w: " ,N_patches_w)
print("N_patches_d: " ,N_patches_d)
print("N_patches_img: ",N_patches_img)
assert(preds.shape[0]%N_patches_img==0)
N_full_imgs = preds.shape[0]//N_patches_img
print("According to the dimension inserted, there are " \
+str(N_full_imgs) +" full images (of " +str(img_h)+"x" +str(img_w)+"x" +str(img_d) +" each)")
# itialize to zero mega array with sum of Probabilities
raw_pred_martrix = np.zeros((N_full_imgs,img_h,img_w,img_d))
raw_sum = np.zeros((N_full_imgs,img_h,img_w,img_d))
final_matrix = np.zeros((N_full_imgs,img_h,img_w,img_d),dtype='uint16')
k = 0
# iterator over all the patches
for i in range(N_full_imgs):
for h in range((img_h-patch_h)//stride_h+1):
for w in range((img_w-patch_w)//stride_w+1):
for d in range((img_d-patch_d)//stride_d+1):
raw_pred_martrix[i,h*stride_h:(h*stride_h)+patch_h,\
w*stride_w:(w*stride_w)+patch_w,\
d*stride_d:(d*stride_d)+patch_d]+=preds[k]
raw_sum[i,h*stride_h:(h*stride_h)+patch_h,\
w*stride_w:(w*stride_w)+patch_w,\
d*stride_d:(d*stride_d)+patch_d]+=1.0
k+=1
assert(k==preds.shape[0])
#To check for non zero sum matrix
assert(np.min(raw_sum)>=1.0)
final_matrix = np.around(raw_pred_martrix/raw_sum)
return final_matrix
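# Illustrative sketch, not part of the original module; the prediction tensor
# and volume dimensions below are hypothetical. For 32x32x32 patches extracted
# from one 64x64x64 volume with stride 16, there are ((64-32)//16+1)**3 = 27
# patches per image.
#
#   preds = sess.run(prediction, feed_dict={...})   # shape (27, 32, 32, 32)
#   volume = recompose3D_overlap(preds, 64, 64, 64, 16, 16, 16)
#   volume.shape   # (1, 64, 64, 64)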