[602ab8]: / src / enhancement.py

Download this file

110 lines (82 with data), 3.2 kB

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
from __future__ import print_function
import os
import numpy as np
import nibabel as nib
from scipy.signal import medfilt
from multiprocessing import Pool, cpu_count
def create_dir(path):
    """Create directory *path* (including parents) if it does not exist.

    EAFP: attempt the creation and ignore the failure only when the
    directory is already there.  This avoids the check-then-create race
    of an isdir() test and stays Python 2/3 compatible (no exist_ok).
    """
    try:
        os.makedirs(path)
    except OSError:
        # Re-raise real failures (permissions, path is a file, ...).
        if not os.path.isdir(path):
            raise
def load_nii(path):
    """Load a NIfTI file and return ``(data_array, affine)``.

    ``img.get_data()`` and ``img.get_affine()`` were deprecated and removed
    in nibabel >= 3.0.  ``np.asanyarray(img.dataobj)`` is the documented
    replacement that preserves the on-disk dtype (unlike ``get_fdata()``,
    which always returns float64), and ``img.affine`` replaces
    ``get_affine()`` with the same value.
    """
    nii = nib.load(path)
    return np.asanyarray(nii.dataobj), nii.affine
def save_nii(data, path, affine):
    """Write *data* to *path* as a NIfTI-1 image using *affine*."""
    image = nib.Nifti1Image(data, affine)
    nib.save(image, path)
def denoise(volume, kernel_size=3):
    """Suppress impulse noise in *volume* with a median filter.

    *kernel_size* is forwarded to scipy.signal.medfilt (default 3).
    """
    filtered = medfilt(volume, kernel_size)
    return filtered
def rescale_intensity(volume, percentils=(0.5, 99.5), bins_num=256):
    """Rescale the intensities of the non-zero (foreground) voxels.

    Parameters
    ----------
    volume : np.ndarray
        Image volume; voxels <= 0 are treated as background and kept at 0.
    percentils : sequence of two floats
        Lower/upper percentiles of the foreground used as the intensity
        window.  (Tuple default replaces the original mutable-list default,
        which is shared across calls — a classic Python pitfall.)
    bins_num : int
        If 0, foreground is linearly normalized by the percentile window;
        otherwise it is quantized to integer bins in [1, bins_num - 1],
        with out-of-window values clamped.

    Returns
    -------
    np.ndarray
        The volume with rescaled foreground (same object, cast to the
        rescaled dtype).
    """
    fg_mask = volume > 0
    obj_volume = volume[fg_mask]
    min_value = np.percentile(obj_volume, percentils[0])
    max_value = np.percentile(obj_volume, percentils[1])
    if bins_num == 0:
        # Continuous normalization by the percentile window.
        obj_volume = (obj_volume - min_value) / (max_value - min_value).astype(np.float32)
    else:
        # Quantize into bins and clamp outliers to [1, bins_num - 1] so no
        # foreground voxel collides with the background value 0.
        obj_volume = np.round((obj_volume - min_value) / (max_value - min_value) * (bins_num - 1))
        obj_volume[obj_volume < 1] = 1
        obj_volume[obj_volume > (bins_num - 1)] = bins_num - 1
    volume = volume.astype(obj_volume.dtype)
    volume[fg_mask] = obj_volume
    return volume
def equalize_hist(volume, bins_num=256):
    """Histogram-equalize the non-zero (foreground) voxels of *volume*.

    Builds the foreground histogram, maps intensities through the
    normalized cumulative distribution scaled to [0, bins_num - 1], and
    writes the equalized values back over the foreground.  Background
    voxels (<= 0) are untouched.
    """
    fg_mask = volume > 0
    obj_volume = volume[fg_mask]
    # `normed=` was deprecated and then removed from np.histogram
    # (NumPy >= 1.24).  density=True is the supported spelling, and since
    # the cdf is renormalized by cdf[-1] below, the result is identical.
    hist, bins = np.histogram(obj_volume, bins_num, density=True)
    cdf = hist.cumsum()
    cdf = (bins_num - 1) * cdf / cdf[-1]
    obj_volume = np.round(np.interp(obj_volume, bins[:-1], cdf)).astype(obj_volume.dtype)
    volume[fg_mask] = obj_volume
    return volume
def unwarp_enhance(arg, **kwarg):
    """Unpack a tuple of positional arguments into enhance().

    Adapter for Pool.map, which passes exactly one object per work item:
    each item here is a tuple of enhance()'s positional arguments (the
    pre-``starmap`` idiom for multi-argument pool work).
    """
    return enhance(*arg, **kwarg)
def enhance(src_path, dst_path, kernel_size=3,
            percentils=(0.5, 99.5), bins_num=256, eh=True):
    """Enhance one NIfTI volume: denoise, rescale, optionally equalize.

    Parameters
    ----------
    src_path : str
        Path of the input .nii volume.
    dst_path : str
        Path where the enhanced volume is written.
    kernel_size : int
        Median-filter kernel size passed to denoise().
    percentils : sequence of two floats
        Intensity percentiles forwarded to rescale_intensity().  The tuple
        default replaces the original mutable-list default, which is shared
        across calls.
    bins_num : int
        Number of intensity bins for rescaling/equalization.
    eh : bool
        If True, apply histogram equalization after rescaling.
    """
    print("Preprocess on: ", src_path)
    try:
        volume, affine = load_nii(src_path)
        volume = denoise(volume, kernel_size)
        volume = rescale_intensity(volume, percentils, bins_num)
        if eh:
            volume = equalize_hist(volume, bins_num)
        save_nii(volume, dst_path, affine)
    except RuntimeError:
        # NOTE(review): only RuntimeError is caught, so I/O failures
        # (missing file, unreadable header) still propagate and would kill
        # the worker — confirm this narrowness is intentional.
        print("\tFailed on: ", src_path)
if __name__ == "__main__":
    # The __main__ guard is REQUIRED when using multiprocessing: on
    # spawn-based platforms (Windows, macOS default) each worker re-imports
    # this module, and unguarded top-level Pool creation would fork-bomb.

    # Layout: <parent-of-cwd>/data/ADNIDenoise/{AD,NC} -> .../ADNIEnhance/{AD,NC}
    parent_dir = os.path.dirname(os.getcwd())
    data_dir = os.path.join(parent_dir, "data")
    data_src_dir = os.path.join(data_dir, "ADNIDenoise")
    data_dst_dir = os.path.join(data_dir, "ADNIEnhance")
    data_labels = ["AD", "NC"]
    create_dir(data_dst_dir)

    # Collect parallel src/dst path lists, one entry per subject file.
    data_src_paths, data_dst_paths = [], []
    for label in data_labels:
        src_label_dir = os.path.join(data_src_dir, label)
        dst_label_dir = os.path.join(data_dst_dir, label)
        create_dir(dst_label_dir)
        for subject in os.listdir(src_label_dir):
            data_src_paths.append(os.path.join(src_label_dir, subject))
            data_dst_paths.append(os.path.join(dst_label_dir, subject))

    # Enhancement settings shared by every subject.
    kernel_size = 3
    percentils = [0.5, 99.5]
    bins_num = 256
    eh = True

    # Fan per-subject work out across all cores.  Each item in `paras` is
    # the full positional-argument tuple for enhance(), unpacked by
    # unwarp_enhance (Pool.map passes a single object per item).
    subj_num = len(data_src_paths)
    paras = zip(data_src_paths, data_dst_paths,
                [kernel_size] * subj_num,
                [percentils] * subj_num,
                [bins_num] * subj_num,
                [eh] * subj_num)
    pool = Pool(processes=cpu_count())
    try:
        pool.map(unwarp_enhance, paras)
    finally:
        # Ensure workers are shut down and reaped even if map() raises.
        pool.close()
        pool.join()