|
a |
|
b/seg_deploy.py |
|
|
1 |
import csv |
|
|
2 |
caffe_root = '/home/toanhoi/caffe/build/tools/caffe/' |
|
|
3 |
import sys |
|
|
4 |
import os |
|
|
5 |
sys.path.insert(0, caffe_root + 'python') |
|
|
6 |
os.environ["GLOG_minloglevel"] = "1" |
|
|
7 |
import caffe |
|
|
8 |
import argparse |
|
|
9 |
import numpy as np |
|
|
10 |
from medpy.io import load, save |
|
|
11 |
|
|
|
12 |
#0: Background (everything outside the brain)
#10: Cerebrospinal fluid (CSF)
#150: Gray matter (GM)
#250: White matter (WM)
def convert_label_submit(label_img):
    """Map internal class ids back to iSeg-2017 submission intensities.

    1 -> 10 (CSF), 2 -> 150 (GM), 3 -> 250 (WM); every other value
    (including background 0) is kept as-is.

    Args:
        label_img: integer ndarray of class ids (expected values {0,1,2,3}).

    Returns:
        np.uint8 ndarray of the same shape with submission label values.

    Note:
        Unlike the previous per-slice version, the input array is NOT
        modified (numpy slices are views, so writing into them wrote
        straight through to the caller's array).
    """
    # astype() returns a fresh copy, so the relabeling below never touches
    # the caller's data. Comparing against the *original* array means the
    # three assignments cannot collide with each other.
    label_processed = label_img.astype(np.uint8)
    for src, dst in ((1, 10), (2, 150), (3, 250)):
        label_processed[label_img == src] = dst
    return label_processed
|
25 |
|
|
|
26 |
def convert_label(label_img):
    """Map iSeg-2017 intensity labels to compact class ids for training/eval.

    10 (CSF) -> 1, 150 (GM) -> 2, 250 (WM) -> 3; every other value
    (including background 0) is kept as-is.

    Args:
        label_img: integer ndarray of label intensities
            (expected values {0,10,150,250}).

    Returns:
        np.uint8 ndarray of the same shape with class ids in {0,1,2,3}.

    Note:
        Unlike the previous per-slice version, the input array is NOT
        modified (numpy slices are views, so writing into them wrote
        straight through to the caller's array).
    """
    # astype() returns a fresh copy, so the relabeling below never touches
    # the caller's data. Comparing against the *original* array means the
    # three assignments cannot collide with each other.
    label_processed = label_img.astype(np.uint8)
    for src, dst in ((10, 1), (150, 2), (250, 3)):
        label_processed[label_img == src] = dst
    return label_processed
|
35 |
#Reference https://github.com/ginobilinie/infantSeg
def dice(im1, im2, tid):
    """Dice similarity coefficient for a single class id.

    Computes 2*|A∩B| / (|A| + |B|) over the binary masks
    ``im1 == tid`` and ``im2 == tid``.

    Args:
        im1: ndarray (or array-like) of class ids, e.g. a prediction.
        im2: ndarray (or array-like) of class ids, e.g. the ground truth.
        tid: class id to evaluate.

    Returns:
        float Dice score in [0, 1]; nan (with a runtime warning) when the
        class is absent from both volumes, matching the original behavior.

    Raises:
        ValueError: if the two inputs have different shapes.
    """
    # '==' already yields boolean arrays, so no cast is needed.
    # (The previous np.bool alias was removed in NumPy 1.24.)
    im1 = np.asarray(im1) == tid
    im2 = np.asarray(im2) == tid
    if im1.shape != im2.shape:
        raise ValueError("Shape mismatch: im1 and im2 must have the same shape.")
    # Compute Dice coefficient
    intersection = np.logical_and(im1, im2)
    dsc = 2. * intersection.sum() / (im1.sum() + im2.sum())
    return dsc
|
|
47 |
|
|
|
48 |
if __name__ == "__main__":
    # Sliding-window inference of a 3D-DenseSeg Caffe model on one iSeg-2017
    # training subject, Dice evaluation against its labels, and export of the
    # prediction in submission format.
    # NOTE(review): -start/-end are declared but parse_args() is never called,
    # so these flags currently have no effect.
    parser = argparse.ArgumentParser(description='makes a plot from Caffe output')
    parser.add_argument("-start")
    parser.add_argument("-end")

    # GPU by default; set CAFFE_CPU_MODE in the environment to force CPU.
    if (os.environ.get('CAFFE_CPU_MODE')):
        caffe.set_mode_cpu()
    else:
        caffe.set_mode_gpu()

    data_path = '/media/toanhoi/Study/databaseSeg/ISeg/iSeg-2017-Training'

    # Hard-coded evaluation subject.
    subject_id=9
    subject_name = 'subject-%d-' % subject_id

    # Load the two MRI modalities (Analyze .hdr/.img pairs via medpy).
    f_T1 = os.path.join(data_path, subject_name + 'T1.hdr')
    inputs_T1, header_T1 = load(f_T1)
    inputs_T1 = inputs_T1.astype(np.float32)

    f_T2 = os.path.join(data_path, subject_name + 'T2.hdr')
    inputs_T2, header_T2 = load(f_T2)
    inputs_T2 = inputs_T2.astype(np.float32)

    # Ground-truth labels, remapped from intensities {0,10,150,250}
    # to class ids {0,1,2,3}.
    f_l = os.path.join(data_path, subject_name + 'label.hdr')
    labels, header_label = load(f_l)
    labels = labels.astype(np.uint8)
    labels=convert_label(labels)

    # Brain mask: voxels with positive T1 intensity; statistics for the
    # normalization below are computed inside the mask only.
    # NOTE(review): np.bool was removed in NumPy 1.24 — this line needs
    # plain `bool` on modern NumPy.
    mask = inputs_T1 > 0
    mask = mask.astype(np.bool)
    # ======================normalize to 0 mean and 1 variance====
    # Normalization
    inputs_T1_norm =(inputs_T1 - inputs_T1[mask].mean()) / inputs_T1[mask].std()
    inputs_T2_norm = (inputs_T2 - inputs_T2[mask].mean()) / inputs_T2[mask].std()

    # Stack the modalities as a trailing channel axis ...
    inputs_T1_norm = inputs_T1_norm[:, :, :, None]
    inputs_T2_norm = inputs_T2_norm[:, :, :, None]

    inputs = np.concatenate((inputs_T1_norm, inputs_T2_norm), axis=3)
    # ... then reorder to the Caffe blob layout (batch, channel, D, H, W).
    inputs = inputs[None, :, :, :, :]
    inputs = inputs.transpose(0, 4, 3, 1, 2)
    num_class=4
    num_paches=0

    model_def='./deploy_3d_denseseg.prototxt'
    model_weights = "./snapshot/3d_denseseg_iseg_iter_200000.caffemodel"
    net = caffe.Net(model_def, model_weights,caffe.TEST)
    # Cubic input patch size and the sliding-window strides (voxels) along
    # the depth/height/width axes; strides < patch size give overlapping
    # patches whose votes are accumulated below.
    patch_input = [64, 64, 64]
    xstep = 16
    ystep = 8#16
    zstep = 16#16
    # Patch-center coordinates per axis. The `+ step` in the stop value
    # makes the windows cover the far border of the volume.
    deep_slices = np.arange(patch_input[0] // 2, inputs.shape[2] - patch_input[0] // 2 + xstep, xstep)
    height_slices = np.arange(patch_input[1] // 2, inputs.shape[3] - patch_input[1] // 2 + ystep, ystep)
    width_slices = np.arange(patch_input[2] // 2, inputs.shape[4] - patch_input[2] // 2 + zstep, zstep)
    # Per-class vote accumulator over the full volume, and a visit counter
    # (seeded with 1e-5 so the final division never hits 0/0 on voxels no
    # patch covered).
    output = np.zeros((num_class,) + inputs.shape[2:])
    count_used=np.zeros((inputs.shape[2],inputs.shape[3],inputs.shape[4]))+1e-5

    total_patch=len(deep_slices)*len(height_slices)*len(width_slices)
    for i in range(len(deep_slices)):
        for j in range(len(height_slices)):
            for k in range(len(width_slices)):
                num_paches=num_paches+1
                deep = deep_slices[i]
                height = height_slices[j]
                width = width_slices[k]
                # Cut the 64^3 patch centred at (deep, height, width).
                raw_patches= inputs[:,:,deep - patch_input[0] // 2:deep + patch_input[0] // 2,
                             height - patch_input[1] // 2:height + patch_input[1] // 2,
                             width - patch_input[2] // 2:width + patch_input[2] // 2]
                print "Processed: ",num_paches ,"/", total_patch
                net.blobs['data'].data[...] = raw_patches
                net.forward()

                #Major voting https://github.com/ginobilinie/infantSeg
                # Hard argmax per voxel; each class gets one vote per patch.
                temp_predic=net.blobs['softmax'].data[0].argmax(axis=0)
                for labelInd in range(4): # note, start from 0
                    currLabelMat = np.where(temp_predic == labelInd, 1, 0) # true, vote for 1, otherwise 0
                    output[labelInd, deep - patch_input[0] // 2:deep + patch_input[0] // 2,
                    height - patch_input[1] // 2:height + patch_input[1] // 2,
                    width - patch_input[2] // 2:width + patch_input[2] // 2] += currLabelMat
                #Average
                # output[slice(None),deep - patch_input[0] // 2:deep + patch_input[0] // 2,
                # height - patch_input[1] // 2:height + patch_input[1] // 2,
                # width - patch_input[2] // 2:width + patch_input[2] // 2]+=net.blobs['softmax'].data[0]

                # Track how many patches touched each voxel.
                count_used[deep - patch_input[0] // 2:deep + patch_input[0] // 2,
                height - patch_input[1] // 2:height + patch_input[1] // 2,
                width - patch_input[2] // 2:width + patch_input[2] // 2]+=1

    # Normalize votes by visit count, then take the winning class per voxel.
    output=output/count_used
    y = np.argmax(output, axis=0)
    # Back to the on-disk (H, W, D) axis order used by the label volumes.
    out_label=y.transpose(1,2,0)
    # Per-class Dice; dsc_0 (background) is computed but excluded from the mean.
    dsc_0 = dice(out_label , labels, 0)
    dsc_1 = dice(out_label , labels, 1)
    dsc_2 = dice(out_label , labels, 2)
    dsc_3 = dice(out_label , labels, 3)
    dsc = np.mean([dsc_1, dsc_2, dsc_3]) # ignore Background
    print dsc_1, dsc_2, dsc_3, dsc
    # Append the scores to a running CSV so repeated runs accumulate rows.
    with open('result_3d_dense_seg.csv', 'a+') as csvfile:
        datacsv = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
        datacsv.writerow([dsc_1, dsc_2, dsc_3, dsc])

    # Convert class ids back to submission intensities {0,10,150,250} and
    # save with the T1 header so the geometry metadata matches the input.
    out_label=out_label.astype(np.uint8)
    out_label = convert_label_submit(out_label)
    save(out_label, '{}/{}'.format("./", "3d_dense_seg_result.hdr"), header_T1)