Preprocessing Medical Data Pipeline/main.py

import subprocess

# (pip package, importable module) pairs; standard-library modules such as
# os, re and random ship with Python and do not need to be installed.
required_libraries = [
    ("numpy", "numpy"), ("pydicom", "pydicom"), ("SimpleITK", "SimpleITK"),
    ("pandas", "pandas"), ("trimesh", "trimesh"), ("pyntcloud", "pyntcloud"),
    ("scikit-image", "skimage"), ("matplotlib", "matplotlib"), ("tqdm", "tqdm"),
    ("nibabel", "nibabel"), ("scipy", "scipy"), ("opencv-python", "cv2"),
    ("segmentation_models", "segmentation_models"),
]

# Install any third-party dependency that cannot be imported.
for package, module in required_libraries:
    try:
        __import__(module)
    except ImportError:
        print(f"{package} is not installed. Installing...")
        subprocess.check_call(["pip", "install", package])
        print(f"{package} has been installed.")


from preprocessing import preprocessing, VisualValidationMSK, transform_string
from image_preprocessing import image_cropping, uniform_cropping, uniform_resizing
from writeout_dataset import Export2CompressedNifiti
from MSKMulticlass import CreateMasks4MulticlassMSK
from createDirectories import createDirectoriesfunc
from preparingTestInstance import preprocessTestScans
from data_augmentation import DataAugmentation
import os
# TensorFlow / Keras and segmentation_models are only required for inference;
# they are imported locally inside AutoSegModel() so the preprocessing steps
# can run in environments without them.
# import tensorflow
# from tensorflow import keras
# from keras.models import load_model
# from keras.utils import to_categorical
import nibabel as nib
import numpy as np
# import segmentation_models as sm
# from keras.metrics import MeanIoU
from skimage.measure import marching_cubes
import matplotlib.pyplot as plt
import trimesh
import SimpleITK as sitk

# Create the expected output directory structure before any processing runs.
createDirectoriesfunc()


def Preprocessing(scans_path, scan_data_folders, Cropping):
    print('-'*30)
    print('Loading and preprocessing training data...')
    print('-'*30)

    # /research\resmed202100086-tws ----> Address for raw data
    # /research\resabi202200010-Friedlander
    total_slices_raw_data = 0
    DataOnlyAOI = False
    ExportDatasets = True
    multiclass_mask_output_dir = None

    # Only set multiclassSegmentation to True once all the AOI masks have been
    # preprocessed, or when processing the final AOI.
    if Cropping == "256x256":
        Cropping = True
    else:
        Cropping = False

    scan_data_folders = [scan_data_folders]

    # Creates the preprocessed scan and mask data and exports it to the Data folder.
    # Formats the binary masks and applies the preprocessing steps.
    segmasks = []
    patient_id = (scan_data_folders[0].split('_'))[-1]
    files = os.listdir('{}/Raw NIFITI Segmentation Masks (3D Slicer Output)'.format(scans_path))

    # Collect every segmentation mask belonging to this patient.
    for file in files:
        file_id = file.split('_')[-1]
        file_id = file_id.split('.')[0]
        if file_id == patient_id:
            segmasks.append(file)
    print(segmasks)

    for segmask in segmasks:
        print(segmask)
        imgs_train, imgs_mask_train, median_aoi_index = preprocessing(scans_path, segmask, scan_data_folders, total_slices_raw_data, DataOnlyAOI, Cropping)
        orientation = (segmask.split('_'))[1]
        colab_fname = [transform_string(segmask)]
        Export2CompressedNifiti(imgs_train, scans_path, colab_fname, imgs_mask_train, orientation)

    # User Input
    # Multi-class mask creation
    individual_mask_directory = '{}/nnUNet Data/masks'.format(scans_path)
    multiclass_mask_output_dir = '{}/nnUNet Data/multiclass_masks'.format(scans_path)
    scan_dir = '{}/nnUNet Data/scans'.format(scans_path)
    input_scan_dir = '{}/nnUNet Data/unprocessed_scans'.format(scans_path)

    # Integer label assigned to each bone class in the multi-class mask.
    TIBIA_encoding = 1
    FEMUR_encoding = 2
    FIBULA_encoding = 3
    PELVIS_encoding = 4
    mask_index = int((scan_data_folders[0].split('_'))[0])
    AOIThresholding = True
    FriedLanderDataset = False

    print('Multi-Class Segmentation Task Data Preparation!')
    CreateMasks4MulticlassMSK(input_scan_dir, scan_dir, individual_mask_directory, mask_index, TIBIA_encoding, FEMUR_encoding, FIBULA_encoding, PELVIS_encoding, multiclass_mask_output_dir, AOIThresholding, FriedLanderDataset)

    print('-'*30)
    print('Completed Preprocessing Stage!')
    print('-'*30)
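
# A minimal usage sketch for Preprocessing() (commented out so importing this
# module has no side effects). The base path and folder name below are
# hypothetical examples; the '<mask index>_..._<patient id>' naming is assumed
# from how the folder name is split above.
#
# scans_path = 'D:/MRI Data'
# scan_data_folder = '004_scan_P004'
# Preprocessing(scans_path, scan_data_folder, Cropping="256x256")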


def preprocessTestScansMain(cutoffSlice, seg_scan_dir, folders):
    # Thin wrapper that preprocesses the test scans for a single folder.
    preprocessTestScans(cutoffSlice, seg_scan_dir, [folders])


def VisualiseSlice(basedir, colab_fname, slice_idx_bin):
    def superimpose_images(image1, image2):
        # Normalise both images to [0, 1] and blend them with equal weighting.
        image1 = image1 / np.max(image1)
        image2 = image2 / np.max(image2)
        alpha = 0.5
        superimposed_image = alpha * image1 + (1 - alpha) * image2
        return superimposed_image

    # Single-class (binary) mask: filename does not start with 'msk'.
    if (colab_fname.split('_'))[0] != 'msk':
        orientation = (colab_fname.split('_'))[1]
        colab_fname = transform_string(colab_fname)
        mask_index = '{:03d}'.format(int((colab_fname.split('_'))[1]))
        nii_img_scan = nib.load('{}/nnUNet Data/scans/msk_{}.nii.gz'.format(basedir, mask_index))
        nii_img_mask = nib.load('{}/nnUNet Data/masks/{}/{}_{}.nii.gz'.format(basedir, (colab_fname.split('_'))[0], colab_fname, orientation))

    # Multi-class mask: filename starts with 'msk'.
    if (colab_fname.split('_'))[0] == 'msk':
        nii_img_scan = nib.load('{}/nnUNet Data/scans/{}.nii.gz'.format(basedir, colab_fname))
        nii_img_mask = nib.load('{}/nnUNet Data/multiclass_masks/{}.nii.gz'.format(basedir, colab_fname))
        orientation = '_'

    mask_data = nii_img_mask.get_fdata()
    scan_data = nii_img_scan.get_fdata()
    image1 = scan_data[int(slice_idx_bin), :, :, 0]
    image2 = mask_data[int(slice_idx_bin), :, :, 0]
    superimposed_image = superimpose_images(image1, image2)

    # Scan and mask overlaid on the selected slice.
    plt.imshow(superimposed_image, cmap='gray')
    plt.title('Validating Scan & Mask on Slice {} of {}_{}'.format(slice_idx_bin, colab_fname, orientation))
    plt.axis('off')
    plt.show()

    # Mask only.
    plt.imshow(image2, cmap='gray')
    plt.title('Validating Mask on Slice {} of {}_{}'.format(slice_idx_bin, colab_fname, orientation))
    plt.axis('off')
    plt.show()

    # Scan only.
    plt.imshow(image1, cmap='gray')
    plt.title('Validating Scan on Slice {} of {}_{}'.format(slice_idx_bin, colab_fname, orientation))
    plt.axis('off')
    plt.show()

    print('-'*30)
    print('Visualisations Generated!')
    print('-'*30)
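
# Hedged usage sketch for VisualiseSlice() (commented out). 'msk_004' is a
# hypothetical multi-class stack name; any filename starting with 'msk' takes
# the multi-class branch, otherwise the single-class branch is used.
#
# VisualiseSlice('D:/MRI Data', 'msk_004', slice_idx_bin=100)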


def DataAug(basedir, aug_fname, num_augs):
    # Load the preprocessed scan stack and its multi-class mask.
    imgs_train = nib.load('{}/nnUNet Data/scans/{}.nii.gz'.format(basedir, aug_fname))
    imgs_mask_train = nib.load('{}/nnUNet Data/multiclass_masks/{}.nii.gz'.format(basedir, aug_fname))
    imgs_train = imgs_train.get_fdata()
    imgs_mask_train = imgs_mask_train.get_fdata()
    num_train = len(imgs_train)
    num_augs = int(num_augs)
    augmented_images_train, augmented_masks_train = DataAugmentation(imgs_train, imgs_mask_train, num_augs)

    # DataAugmentation interleaves the augmented slices; regroup them so that
    # each entry of the sorted lists holds one complete augmented stack.
    augmented_images_train_sorted, augmented_masks_train_sorted = [], []
    for i in range(num_augs):
        augmented_images_train_temp = augmented_images_train[i::num_augs]
        augmented_masks_train_temp = augmented_masks_train[i::num_augs]
        augmented_images_train_sorted.append(augmented_images_train_temp)
        augmented_masks_train_sorted.append(augmented_masks_train_temp)
    augmented_images_train_sorted = np.array(augmented_images_train_sorted)
    augmented_masks_train_sorted = np.array(augmented_masks_train_sorted)

    print('Augmented Training Scan Shape: ', augmented_images_train_sorted.shape)
    print('Augmented Training Mask Shape: ', augmented_masks_train_sorted.shape)

    # Export each augmented stack as a compressed NIfTI pair.
    for i in range(len(augmented_images_train_sorted)):
        temp = np.expand_dims(augmented_images_train_sorted[i], axis=-1)
        temp = temp.astype('float32')
        # Debug dump of a single slice to a hard-coded path.
        np.savetxt(r'D:\MRI - Tairawhiti (User POV)/train.txt', temp[100, :, :, 0], fmt="%d", delimiter=",")
        # temp /= 255.  # scale scans to [0, 1]
        nii_img_train = nib.Nifti1Image(temp, affine=np.eye(4))
        output_file_path = '{}/nnUNet Data/scans/{}_aug{}.nii.gz'.format(basedir, aug_fname, i)
        nib.save(nii_img_train, output_file_path)

        temp = np.expand_dims(augmented_masks_train_sorted[i], axis=-1)
        temp = temp.astype(int)
        # Debug dump of the corresponding mask slice.
        np.savetxt(r'D:\MRI - Tairawhiti (User POV)/train_mask.txt', temp[100, :, :, 0], fmt="%d", delimiter=",")
        nii_img_mask = nib.Nifti1Image(temp, affine=np.eye(4))
        output_file_path = '{}/nnUNet Data/multiclass_masks/{}_aug{}.nii.gz'.format(basedir, aug_fname, i)
        nib.save(nii_img_mask, output_file_path)

    print('-'*30)
    print('Data Augmentation Completed & Exported!')
    print('-'*30)
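
# Hedged usage sketch for DataAug() (commented out); the stack name and
# augmentation count are illustrative only, not values taken from the project.
#
# DataAug('D:/MRI Data', 'msk_004', num_augs=3)   # writes msk_004_aug0..aug2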


def AutoSegModel(subjectfname, base_dir):
    # Keras and segmentation_models are only needed for inference, so they are
    # imported here rather than at module level.
    from keras.models import load_model
    import segmentation_models as sm

    model_dir = '{}/Pre-Trained Models (Google Colab)/res34_backbone_20_epochs_dicefocal_256_4P_12batch_maxF1_pelvis_aug_best.hdf5'.format(base_dir)
    model = load_model(model_dir, compile=False)
    # Predictions and meshes are written next to the pre-trained model file.
    output_dir = os.path.dirname(model_dir)

    n_classes = 5
    BACKBONE = 'resnet34'
    model_used = 'U-Net(resnet34)'

    img_dir = '{}/nnUNet Data/scans/{}.nii.gz'.format(base_dir, subjectfname)
    img = nib.load(img_dir)
    img_data = img.get_fdata()
    # The backbone expects 3-channel input, so replicate the single channel.
    X_test = np.repeat(img_data, 3, axis=3)

    print('Fine-Tuned Model: {}'.format((model_dir.split('/'))[-1]))
    print('Prediction Scan Stack: ', subjectfname)
    print('Number of Segmentation Classes: ', n_classes)
    print('\n')

    print("Test Images Shape: ", X_test.shape)
    print('\n')

    preprocess_input = sm.get_preprocessing(BACKBONE)
    X_test_processed = preprocess_input(X_test)

    # Prediction
    y_pred = model.predict(X_test_processed)
    y_pred_argmax = np.argmax(y_pred, axis=3)
    y_pred_argmax = np.expand_dims(y_pred_argmax, axis=-1)
    print('Pred Mask Shape: ', y_pred_argmax.shape)
    print("Pred Mask Labels: ", np.unique(y_pred_argmax))
    print('\n')

    combined_mask = y_pred_argmax.astype(np.int32)
    combined_img = nib.Nifti1Image(combined_mask, affine=np.eye(4), dtype=np.int32)
    nib.save(combined_img, "{}/{}_pred.nii.gz".format(output_dir, subjectfname))
    print('Exported Prediction Segmentation: ', "{}/{}_pred.nii.gz".format(output_dir, subjectfname))
    print('\n')

    def Export3DStructure(segmentation_data, predfname, class_msk, model_used):
        # Generate a surface mesh using marching cubes
        vertices, faces, normals, _ = marching_cubes(segmentation_data, level=0)

        # Create a Trimesh object
        mesh = trimesh.Trimesh(vertices=vertices, faces=faces, vertex_normals=normals)

        # Save the mesh as a PLY file
        ply_path = '{}/{}_{}_{}.ply'.format(output_dir, predfname, class_msk, model_used)
        mesh.export(ply_path)

    # Isolate each bone class from the combined prediction before meshing.
    segmentation_data_all = y_pred_argmax
    segmentation_data_all = segmentation_data_all[:, :, :, 0]
    tibia_seg_data = np.where(segmentation_data_all != 1, 0, 1)
    femur_seg_data = np.where(segmentation_data_all != 2, 0, 2)
    fibula_seg_data = np.where(segmentation_data_all != 3, 0, 3)
    pelvis_seg_data = np.where(segmentation_data_all != 4, 0, 4)

    segmentation_data = [segmentation_data_all, tibia_seg_data, femur_seg_data, fibula_seg_data, pelvis_seg_data]
    class_msk = ['ALL', 'TIBIA', 'FEMUR', 'FIBULA', 'PELVIS']

    for i in range(len(segmentation_data)):
        Export3DStructure(segmentation_data[i], subjectfname, class_msk[i], model_used)
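
# Hedged usage sketch for AutoSegModel() (commented out); 'msk_004' is a
# hypothetical scan stack name, and the pre-trained .hdf5 model referenced
# above must exist under base_dir for this call to succeed.
#
# AutoSegModel('msk_004', 'D:/MRI Data')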