10x/reference/Analysis-10.py
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>

# <codecell>

import os

import numpy as np

import skimage as ski
# NB: this notebook targets an older scikit-image; in current releases the
# `filter` module is `filters` and `threshold_adaptive` is `threshold_local`.
from skimage import io, filter, color, exposure, morphology, feature, draw, measure, transform

from sklearn.linear_model import BayesianRidge
from sklearn.mixture import GMM

from scipy import spatial, ndimage, signal, stats

import matplotlib
%matplotlib inline
import matplotlib.pyplot as plt
import seaborn
matplotlib.rcParams["figure.figsize"] = (16, 10)
#%matplotlib inline


# WANT
# - Density outside inflammatory zone
# - Area, width, length of focus
# - Area of inflammation / area of vein
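
# A minimal sketch of how the quantities listed above might be measured, assuming
# boolean masks `focus_mask` (inflammatory focus) and `vein_mask` (vein) -- both
# hypothetical here -- plus the nucleus mask `clean` computed further down:
# props = measure.regionprops(measure.label(focus_mask.astype(int)))
# focus = max(props, key=lambda p: p.area)
# focus_area = focus.area                       # area of focus, in pixels
# focus_length = focus.major_axis_length        # length of focus
# focus_width = focus.minor_axis_length         # width of focus
# outside_density = clean[~focus_mask].mean()   # nuclear density outside the inflammatory zone
# inflammation_ratio = focus_area / float(vein_mask.sum())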

# <codecell>

# Load filenames
dirq = "/Users/qcaudron/repositories/Quantitative-Histology/10x/data/"
files = []
for i in os.listdir(dirq) :
    if i.endswith(".jpg") :
        files.append(dirq + i)


offset = 1000
lengthscale = 301


image = exposure.equalize_adapthist(io.imread(files[4]))
#image = io.imread(files[4])
io.imshow(image)
plt.grid(False)

# <codecell>

#binary = np.logical_or(filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 0], cutoff=0.4, gain=20), lengthscale), \
#                       filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 2], cutoff=0.5, gain=20), lengthscale))

# Adaptive threshold on the sigmoid-stretched red channel, then tidy up: close small
# gaps, drop small objects, and finally invert so that the (dark) nuclei become the
# foreground, again dropping small specks.
binary = filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 0], cutoff=0.4, gain = 30), 301).astype(bool)
clean = morphology.binary_closing(binary, morphology.disk(3)).astype(bool)
clean = morphology.remove_small_objects(clean, 200)
clean = morphology.remove_small_objects( (1-clean).astype(bool), 200)


io.imshow(clean)
plt.grid(False)

# <codecell>

(xdim, ydim, _) = image.shape
xdim /= 10
ydim /= 10

# Smoothing the binary nucleus mask gives a map of local nuclear density
local_density = filter.gaussian_filter(clean, 61)

fig, ax = plt.subplots()

io.imshow(local_density)

#for n, c in enumerate(measure.find_contours(local_density, local_density.max()/2)) :
#    ax.plot(c[:, 1], c[:, 0], linewidth=2)

plt.grid(False)

# <codecell>

ent = filter.gaussian_filter(filter.rank.entropy(local_density, morphology.disk(3)), 75)

ent -= ent.min()
ent /= ent.max()

local_density -= local_density.min()
local_density /= local_density.max()

io.imshow(ent)
plt.grid(False)

# <codecell>

# Heuristic "information" map: regions that are both heterogeneous (high local
# entropy) and dense score highly.
info = ent * (1 + local_density)

io.imshow(info)
plt.grid(False)

# <codecell>

bw = info > filter.threshold_otsu(info)
#bw = morphology.dilation(bw, morphology.disk(21))


fig, ax = plt.subplots()

io.imshow(image)
C = measure.find_contours(bw, 0.5)

# For each candidate contour: distance of its centroid from the image centre, and a
# local-density score for the contour; keep the contour minimising
# (distance from centre) / (density score), i.e. central and dense.
centroid = []
vals = []
for c in C :
    ax.plot(c[:, 1], c[:, 0], lw=5)
    centroid.append(np.linalg.norm([c[:, 1].mean() - bw.shape[1] / 2, c[:, 0].mean() - bw.shape[0] / 2]))
    vals.append(local_density.T[c.astype(int)].sum())

cent = C[np.argmin(centroid / np.array(vals))]

ax.plot(cent[:, 1], cent[:, 0], lw=5, c="k", alpha = 0.7)

plt.grid(False)
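
# One possible follow-up (not done here) to turn the selected contour into a
# measurable region:
# focus_mask = measure.grid_points_in_poly(bw.shape, cent)   # fill the contour
# focus_area = focus_mask.sum()                              # pixels inside the focus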

# <codecell>

plt.scatter(centroid, centroid / np.array(vals))
#ax = plt.gca()
#ax.set_yscale("log")
#ax.set_ylim([1e-9, 1e-5])

print(1. / np.array(vals))

# <codecell>

# DO NOT RUN ME

for f in files :
    image = exposure.equalize_adapthist(io.imread(f))
    binary = filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 0], cutoff=0.4, gain = 30), 301).astype(bool)
    clean = morphology.binary_closing(binary, morphology.disk(3)).astype(bool)
    clean = morphology.remove_small_objects(clean, 200)
    clean = morphology.remove_small_objects( (1-clean).astype(bool), 200)
    local_density = filter.gaussian_filter(clean, 61)
    ent = filter.gaussian_filter(filter.rank.entropy(local_density, morphology.disk(3)), 101)
    ent -= ent.min()
    ent /= ent.max()
    local_density -= local_density.min()
    local_density /= local_density.max()
    info = ent * (1 + local_density)
    bw = (info) > filter.threshold_otsu(info)


    fig, ax = plt.subplots()
    plt.imshow(image)
    C = measure.find_contours(bw, 0.5)
    centroid = []
    for c in C :
        centroid.append(np.linalg.norm([c[:, 1].mean() - bw.shape[1] / 2, c[:, 0].mean() - bw.shape[0] / 2]))
    cent = C[np.argmin(centroid)]

    ax.scatter(cent[:, 1], cent[:, 0], lw=3)

    plt.grid(False)
    plt.savefig(f.split("Sheep")[1])
    print ("Done " + f.split("Sheep")[1])

# <codecell>

f.split("Sheep")

# <codecell>


# <codecell>

# Segmentation

distance = ndimage.distance_transform_edt(clean)
peaks = feature.peak_local_max(distance, indices=False, labels = clean)
markers = ndimage.label(peaks)[0]
labels = morphology.watershed(-distance, markers, mask = clean)

io.imshow(labels, interpolation = "nearest", cmap = plt.cm.spectral)
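
# This is the standard distance-transform watershed recipe: seed at the local maxima
# of the Euclidean distance transform and flood its negative, masked to the nuclei.
# A rough nucleus count could then be read off as, e.g., n_nuclei = labels.max()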

# <codecell>

io.imshow(filter.gaussian_filter(image, 31))

# <codecell>

# Local density

B = image[:, :, 2]
#B = filter.gaussian_filter(np.abs(B - B.mean()), 31)
#io.imshow(B > 1.1*filter.threshold_otsu(B))
io.imshow(color.rgb2xyz(image)[:, :, 2])


#local_density = filter.gaussian_filter(image[:,:,0], 41)# - filter.gaussian_filter(clean, 201)
#io.imshow(local_density > 1.2 * filter.threshold_otsu(local_density, nbins = 1000))
#io.imshow(local_density - local_density.mean() > 0)
"""
#io.imshow(filter.gaussian_filter(clean, 51))
Q = (signal.convolve2d(ski.img_as_float(clean / clean.max()), ski.img_as_float(morphology.disk(201)), "valid"))# - filter.gaussian_filter(clean, 251))
Q -= Q.min()
Q /= Q.max()
io.imshow(Q)
"""

# <codecell>

# NB: `A` is only defined a few cells further down (A = clean[1000:2000, 2500:])
B = signal.fftconvolve(ski.img_as_float(A), morphology.disk(31))
B -= B.min()
B /= B.max()
signal.fftconvolve

# <codecell>

import numpy as np
import matplotlib.pyplot as plt
from scipy import signal

B = np.zeros((100,100))
B[30:70, 30:70] = 1
using_c2d = signal.convolve2d(B, B, mode = "same")
using_fft = signal.fftconvolve(B, B, mode = "same")

plt.subplot(131)
plt.imshow(B)
plt.grid(False)

plt.subplot(132)
plt.imshow(using_c2d)
plt.grid(False)

plt.subplot(133)
plt.imshow(using_fft)
plt.grid(False)

print((np.abs(using_fft - using_c2d) < 1e-10).all())
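
# The two results agree to within floating-point error; for kernels of this size the
# FFT-based version is typically much faster, which can be checked in IPython with:
# %timeit signal.convolve2d(B, B, mode="same")
# %timeit signal.fftconvolve(B, B, mode="same")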

# <codecell>

plt.imshow(using_fft - using_c2d)

# <codecell>

def fftconvolve2d(x, y, mode="full"):
    """
    x and y must be real 2-d numpy arrays.

    mode must be "full", "valid" or "same".
    """
    x_shape = np.array(x.shape)
    y_shape = np.array(y.shape)
    z_shape = x_shape + y_shape - 1
    z = np.fft.ifft2(np.fft.fft2(x, z_shape) * np.fft.fft2(y, z_shape)).real

    if mode != "full":
        # To compute a valid shape, either np.all(x_shape >= y_shape) or
        # np.all(y_shape >= x_shape).
        valid_shape = x_shape - y_shape + 1
        if np.any(valid_shape < 1):
            valid_shape = y_shape - x_shape + 1
            if np.any(valid_shape < 1):
                raise ValueError("empty result for valid shape")

        if mode == "valid" :
            start = (z_shape - valid_shape) // 2
            end = start + valid_shape
            z = z[start[0]:end[0], start[1]:end[1]]

        elif mode == "same" :
            # Centred crop back to the shape of x
            start = (z_shape - x_shape) // 2
            end = start + x_shape
            z = z[start[0]:end[0], start[1]:end[1]]

    return z


def makeGaussian(size, fwhm = 3, center=None):
    """ Make a square gaussian kernel.

    size is the length of a side of the square
    fwhm is full-width-half-maximum, which
    can be thought of as an effective radius.
    """

    x = np.arange(0, size, 1, float)
    y = x[:,np.newaxis]

    if center is None:
        x0 = y0 = size // 2
    else:
        x0 = center[0]
        y0 = center[1]

    return np.exp(-4*np.log(2) * ((x-x0)**2 + (y-y0)**2) / fwhm**2)


A = clean[1000:2000, 2500:]
io.imshow(fftconvolve2d(A, makeGaussian(61, fwhm=31), mode="same"))

# <codecell>


# <codecell>

for im in files :

    # Read image
    image = io.imread(im)

    # Threshold for nuclei
    binary = np.logical_or(filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 0], cutoff=0.4, gain=30), lengthscale), \
                      filter.threshold_adaptive(exposure.adjust_sigmoid(image[:, :, 2], gain=30), lengthscale) )

    # Clean up binary image
    clean = (1 - morphology.binary_closing(morphology.remove_small_objects(binary, lengthscale), morphology.disk(3))) #* \
#            color.rgb2grey(image)


    io.imshow(image)

    # Create smooth density surface
#    surf = filter.gaussian_filter(clean, lengthscale, mode = "constant")

    # Calculated baseline and remove
    """
    r = [BayesianRidge().fit(np.expand_dims(np.arange(surf.shape[i]), axis=1), np.mean(surf, axis = 1-i)) for i in [0, 1]]
    X, Y = np.meshgrid(r[1].predict(np.expand_dims(np.arange(surf.shape[1]), axis=1)), \
                       r[0].predict(np.expand_dims(np.arange(surf.shape[0]), axis=1)))
    """


"""
    r = [BayesianRidge().fit(np.vander(np.arange(surf.shape[i]), 2), np.mean(surf, axis = 1-i)) for i in [0, 1]]
    X, Y = np.meshgrid(r[1].predict(np.vander(np.arange(surf.shape[1]), 2)), \
                       r[0].predict(np.vander(np.arange(surf.shape[0]), 2)))

    baseline = X * Y
#    hist, bins = np.histogram(surf.ravel(), 100)
#    baseline = bins[np.argmax(hist)+1]# + np.std(surf)
    zeroed = surf - baseline
    zeroed[zeroed < 0] = 0

    # Find peaks in image centre
    peaks = feature.peak_local_max(zeroed[offset:-offset, offset:-offset])
    dist = np.min(spatial.distance.pdist(peaks))/2. if len(peaks) > 1 else min(zeroed.shape)/2.

    # Compute volume integrals and maxima per peak
    integrals = []
    maxima = []
    for i in peaks :
        mask = np.zeros_like(zeroed)
        mask[draw.circle(i[0] + offset, i[1] + offset, dist, shape = zeroed.shape)] = 1
        integrals.append(np.sum(zeroed * mask) / np.sum(mask))
        maxima.append((zeroed[i[0] + offset, i[1] + offset] + baseline) / baseline)
"""

#    io.imshow(surf)
#    plt.grid(0)

# <codecell>

"""
#image_max = ndimage.maximum_filter(dist, size=11, mode='constant')


#dist = ndimage.distance_transform_edt(clean)
#crit = np.zeros_like(clean)
#coordinates = feature.peak_local_max(dist, min_distance=5)

s2 = filter.gaussian_filter(clean, 21)
dist2 = ndimage.distance_transform_edt(s2 > filter.threshold_otsu(s2))

coordinates2 = feature.peak_local_max(dist2, min_distance=5)

#print len(coordinates), len(coordinates2)
#scales = [11, 21, 31, 41, 51]

spatial.voronoi_plot_2d(spatial.Voronoi(coordinates2))
#io.imshow(filter.gaussian_filter(clean, 11))
"""


nuclei = feature.peak_local_max(ndimage.distance_transform_edt(clean), min_distance=11)
# The next statement was left unfinished; a Voronoi tessellation of the detected
# nuclei is presumably what was intended (cf. the commented-out spatial.Voronoi above).
tessellation = spatial.Voronoi(nuclei)

scales = [11, 21, 31, 41, 51]
# for i in scales :    # left unfinished in the original; the loop body was never written

# <codecell>

# Number of detected density peaks as a function of smoothing scale
x = []
for i in range(3, 51, 2) :
    s = filter.gaussian_filter(clean, i)
    x.append(len(feature.peak_local_max(ndimage.distance_transform_edt(s > filter.threshold_otsu(s)), min_distance=5)))

plt.plot(range(3, 51, 2), x)

# <codecell>

Q = [clean]
kernelsize = 51
downsamplesize = 3
for i in range(1, 6) :
    im = filter.gaussian_filter(morphology.binary_opening(Q[i-1], morphology.disk(5)), kernelsize)
    im2 = np.zeros((im.shape[0] // downsamplesize, im.shape[1] // downsamplesize))
    for x in range(im2.shape[0]) :
        for y in range(im2.shape[1]) :
            im2[x, y] = np.mean(im[downsamplesize*x:downsamplesize*(x+1), downsamplesize*y:downsamplesize*(y+1)])
    im2 -= im2.min()
    im2 /= im2.max()
    Q.append(np.round(im2*1.))
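
# Q is a crude multiscale stack: each level opens, smooths, 3x-downsamples and
# re-binarises the previous one; the next cell plots the foreground fraction per level.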

# <codecell>

q = []
for i in Q :
    q.append(np.sum(i) / (i.shape[0] * i.shape[1]))
plt.plot(q)

# <codecell>

io.imshow(Q[2])

# <codecell>

#io.imshow(morphology.binary_erosion(clean, np.ones((21,21))))
#io.imshow(morphology.binary_dilation(clean, morphology.disk(25)))
io.imshow(morphology.binary_dilation(filter.rank.mean(clean, morphology.disk(51)), morphology.disk(51)))

# <codecell>

blah = filter.gaussian_filter(clean, 401)
plt.hist(blah.ravel(), 256);

# <codecell>

io.imshow(blah)# > np.median(blah.ravel()))

# <codecell>

# NB: `surf` and `zeroed` are never assigned by live code in this file (the assignments
# are commented out above), so this cell relies on earlier interactive state.
#r = [BayesianRidge().fit(np.expand_dims(np.arange(surf.shape[i]), axis=1), np.mean(surf, axis = 1-i)) for i in [0, 1]]
X, Y = np.meshgrid(r[0].predict(np.expand_dims(np.arange(surf.shape[0]), axis=1)), \
                   r[1].predict(np.expand_dims(np.arange(surf.shape[1]), axis=1)))

zeroed.shape

# <codecell>

c = clean - np.min(clean)
c /= c.max()
c = c.astype(bool)
io.imsave("/Users/qcaudron/Desktop/charo/2_smoothed.jpg", ski.img_as_uint(surf))

# <codecell>

# Fit linear trends (Bayesian ridge) to the marginal mean profiles of the density surface
z1 = np.mean(surf, axis=0)
z2 = np.mean(surf, axis=1)

#for i in range(surf.shape[1]) :
#    plt.plot(surf[:, i], "k")
#plt.plot(z2)
r = [BayesianRidge().fit(np.vander(np.arange(surf.shape[i]), 2), np.mean(surf, axis = 1-i)) for i in [0, 1]]
r1 = BayesianRidge().fit(np.arange(len(z1)).reshape(len(z1),1), z1)
r2 = BayesianRidge().fit(np.arange(len(z2[500:-500])).reshape(len(z2[500:-500]),1), z2[500:-500])

#plt.plot(r1.predict(np.arange(len(z1)).reshape(len(z1),1)), linewidth=5)
plt.plot(r2.predict(np.arange(len(z2)).reshape(len(z2),1)), linewidth=5)
plt.plot(z2, linewidth=5)
#plt.axhline(b[np.argmax(h)], c="r", linewidth=3)
#plt.plot(r[0].predict(np.vander(np.arange(surf.shape[0]), 2)), linewidth=3)

#plt.plot(r[0].predict(np.arange(len(z1)).reshape(len(z1),1)), linewidth=3)
#plt.plot(r[0].predict(np.expand_dims(np.arange(surf.shape[0]), axis=1)), linewidth=5)
#plt.axhline(np.mean(z1 / r1.predict(np.arange(len(z1)).reshape(len(z1),1))))

# <codecell>

# Same fit on the log of the profile, plotted back on the linear scale
lz = np.log(z2)
r3 = BayesianRidge().fit(np.arange(len(lz[500:-500])).reshape(len(lz[500:-500]),1), lz[500:-500])

plt.plot(np.exp(lz))
plt.plot(np.exp(r3.predict(np.arange(len(lz)).reshape(len(lz),1))))

# <codecell>

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
x, y = np.meshgrid(range(surf.shape[0]), range(surf.shape[1]))

x.shape
ax.plot_surface(y, x, zeroed.T, rstride = 50, cstride = 50, antialiased = False, cmap = cm.coolwarm, linewidth=0)

# <codecell>

np.min(baseline)

# <codecell>

x2, y2 = draw.circle(peaks[1][0]+1000, peaks[1][1]+1000, dist*1., shape = (zeroed.shape))
mask2 = np.zeros_like(zeroed)
mask2[x2, y2] = 1
np.sum(zeroed * mask - zeroed * mask2)

np.sum(zeroed * mask2)