# pathflowai/stain_norm.py — whole-slide stain normalization utilities.
import cv2
import sys
import fire
import histomicstk
import histomicstk as htk
import openslide
import dask
import tqdm
import numpy as np
from dask.diagnostics import ProgressBar
from pathflowai.utils import generate_tissue_mask
from histomicstk.preprocessing.color_normalization.\
deconvolution_based_normalization import deconvolution_based_normalization
# Target stain matrix for deconvolution-based normalization: columns are
# stain vectors in RGB optical-density space — presumably hematoxylin,
# eosin, and a residual third channel; TODO confirm against histomicstk's
# expected column ordering.
W_target = np.array([
    [0.6185391, 0.1576997, -0.01119131],
    [0.7012888, 0.8638838, 0.45586256],
    [0.3493163, 0.4657428, -0.85597752]
])
def return_norm_image(img, mask, W_source=None, W_target=None):
    """Stain-normalize a single RGB patch via color deconvolution.

    Parameters
    ----------
    img : np.ndarray
        RGB patch to normalize.
    mask : np.ndarray of bool
        Tissue mask for the patch; pixels outside the mask are excluded
        from stain unmixing (passed to ``mask_out`` inverted).
    W_source : np.ndarray, optional
        Source stain matrix estimated from the slide.
    W_target : np.ndarray, optional
        Target stain matrix to map onto.

    Returns
    -------
    np.ndarray
        The normalized patch.
    """
    # I_0=215 is the assumed background intensity for unmixing —
    # NOTE(review): matches the constant used for Macenko estimation upstream.
    normalized = deconvolution_based_normalization(
        img,
        W_source=W_source,
        W_target=W_target,
        im_target=None,
        stains=['hematoxylin', 'eosin'],
        mask_out=~mask,
        stain_unmixing_routine_params={"I_0": 215},
    )
    return normalized
def check_ext(image_file):
    """Return True if *image_file* has a supported slide/image extension.

    Parameters
    ----------
    image_file : str
        Path or filename to check.

    Returns
    -------
    bool
        True for .svs/.png/.jpg/.jpeg/.tiff/.tif, else False.
    """
    # str.endswith accepts a tuple of suffixes — one C-level call instead of
    # building a list and scanning it with any().
    return image_file.endswith(('.svs', '.png', '.jpg', '.jpeg', '.tiff', '.tif'))
def stain_norm(image_file, compression=10, patch_size=1024):
    """Stain-normalize a whole-slide image or a saved RGB array, patch-wise.

    Parameters
    ----------
    image_file : str
        Path to a slide readable by openslide (.svs/.png/.jpg/.jpeg/.tiff/.tif)
        or to a saved RGB numpy array (.npy).
    compression : int
        Downsampling factor used for tissue-mask generation and for the
        thumbnail from which the source stain matrix is estimated.
    patch_size : int
        Side length of the square patches normalized independently in parallel.

    Returns
    -------
    np.ndarray
        uint8 image of the same shape as the input, tissue stain-normalized,
        non-tissue regions left white (255).

    Raises
    ------
    NotImplementedError
        If the file extension is not recognized.
    """
    if check_ext(image_file):
        slide = openslide.open_slide(image_file)
        # Read full-resolution level 0; drop the alpha channel of the RGBA output.
        image = np.array(slide.read_region((0, 0), 0, slide.level_dimensions[0]))[..., :3]
    elif image_file.endswith(".npy"):
        image = np.load(image_file)
    else:
        raise NotImplementedError
    mask = generate_tissue_mask(image, compression=compression, keep_holes=False)
    # Estimate the source stain matrix from a downsampled thumbnail (Macenko PCA,
    # background intensity 215), then reorder stains to histomicstk's convention.
    # (A previously computed downsampled mask was dead code and has been removed.)
    img_small = cv2.resize(image, None, fx=1 / compression, fy=1 / compression)
    W_source = htk.preprocessing.color_deconvolution.rgb_separate_stains_macenko_pca(img_small, 215)
    W_source = htk.preprocessing.color_deconvolution._reorder_stains(W_source)
    res = []
    coords = []
    # Tile the image. Iterating to the full extent (and letting numpy slicing
    # clamp partial edge patches) also normalizes the right/bottom remainder
    # strips, which the previous range(0, dim - patch_size) loop silently
    # dropped, leaving tissue there blank white in the output.
    for i in range(0, image.shape[0], patch_size):
        for j in range(0, image.shape[1], patch_size):
            patch_mask = mask[i:i + patch_size, j:j + patch_size]
            if patch_mask.mean():  # skip patches containing no tissue at all
                coords.append((i, j))
                res.append(dask.delayed(return_norm_image)(
                    image[i:i + patch_size, j:j + patch_size],
                    patch_mask, W_source, W_target))
    with ProgressBar():
        res_returned = dask.compute(*res, scheduler="processes")
    # Background stays white; np.full avoids the ones()*255 float round-trip.
    img_new = np.full(image.shape, 255, dtype=np.uint8)
    for k in tqdm.trange(len(coords)):
        i, j = coords[k]
        img_new[i:i + patch_size, j:j + patch_size] = res_returned[k]
    return img_new
def stain_norm_pipeline(image_file="stain_in.svs",
                        npy_out='stain_out.npy',
                        compression=10,
                        patch_size=1024):
    """Run stain normalization on *image_file* and save the result as .npy.

    Parameters
    ----------
    image_file : str
        Input slide or .npy array path.
    npy_out : str
        Output path for the normalized uint8 array.
    compression : int
        Downsampling factor forwarded to :func:`stain_norm`.
    patch_size : int
        Patch side length forwarded to :func:`stain_norm`.
    """
    normalized = stain_norm(image_file, compression, patch_size)
    np.save(npy_out, normalized)
if __name__=="__main__":
    # Expose stain_norm_pipeline's keyword arguments as CLI flags via fire.
    fire.Fire(stain_norm_pipeline)