Diff of /4x/reference/batch.py [000000] .. [171cba]

Switch to unified view

a b/4x/reference/batch.py
1
# -*- coding: utf-8 -*-
2
# <nbformat>3.0</nbformat>
3
4
# <codecell>
5
6
# Files
7
import os
8
import pickle
9
import sys
10
11
# Basic
12
import numpy as np
13
14
# Image Processing
15
import skimage as ski
16
from skimage import io, feature, morphology, filter, exposure, color, transform, measure
17
import scipy.signal as sig
18
from scipy.spatial import distance
19
from scipy.ndimage import maximum_filter, minimum_filter, binary_fill_holes
20
21
# Stats
22
import scipy.stats as st
23
24
import matplotlib
25
%matplotlib inline
26
import matplotlib.pyplot as plt
27
matplotlib.rcParams["figure.figsize"] = (16, 10)
28
29
# <codecell>
30
31
# Read files: collect every raw JPEG in the data directory, skipping
# images that are themselves the "_processed" output of a previous run.
directory = "../data/"
files = [
    directory + name
    for name in os.listdir(directory)
    if name.endswith(".jpg") and not name.endswith("_processed.jpg")
]

# Restrict this run to a single image (index 50 of the listing) and preview it.
files = [files[50]]
io.imshow(io.imread(files[0]))
41
42
# <codecell>
43
44
45
# Iterate over files, producing a cleaned binary mask per image.
# The names A (raw image), E (full-resolution mask) and Es (downsampled
# mask) are module-level and consumed by the cells below.
for f in files :

    # If it's not been processed before
    if not os.path.exists(f + "_processed.jpg") :

        ### PROCESSING

        # Read the image
        A = io.imread(f)

        # Contrast enhancement (sigmoid remapping of intensities)
        B = exposure.adjust_sigmoid(A, gain=12)

        # Extract luminosity (Y channel of the XYZ colour space)
        C = color.rgb2xyz(B)[:, :, 1]

        # Apply adaptive thresholding (legacy skimage.filter API; block size 301)
        D = filter.threshold_adaptive(C, 301)

        # Clean: remove small dark specks, then small bright specks (< 100 px)
        E = morphology.remove_small_objects(~morphology.remove_small_objects(~D, 100), 100)

        # Save to disk
        # NOTE(review): JPEG is lossy, so the reloaded mask in the else-branch
        # will not be strictly binary -- confirm this is acceptable downstream.
        io.imsave(f + "_processed.jpg", ski.img_as_float(E))

        # Downsample for Gabor filtering (quarter resolution)
        Es = ski.img_as_float(transform.rescale(E, 0.25))

    else :

        # Otherwise, we've processed it before, so read it in for speed
        A = io.imread(f)
        E = ski.img_as_float(io.imread(f + "_processed.jpg"))
        Es = ski.img_as_float(transform.rescale(E, 0.25))
83
# <codecell>
84
85
A = io.imread(files[0])[:2000, 1000:-1000, :]
86
A2 = filter.gaussian_filter(A, 5)
87
B1 = exposure.adjust_sigmoid(A, gain=12)
88
B2 = exposure.adjust_sigmoid(A2, gain=12)
89
C1 = color.rgb2xyz(B1)[:, :, 1]
90
C2 = color.rgb2xyz(B2)[:, :, 1]
91
D1 = filter.threshold_adaptive(C1, 301)
92
D2 = filter.threshold_adaptive(C2, 301)
93
E1 = morphology.remove_small_objects(~morphology.remove_small_objects(~D1, 100), 100)
94
E2 = morphology.remove_small_objects(~morphology.remove_small_objects(~D2, 100), 100)
95
96
d1 = filter.threshold_adaptive(C1, 301, offset=-0.01)
97
d2 = filter.threshold_adaptive(C2, 301, offset=-0.01)
98
e1 = morphology.remove_small_objects(~morphology.remove_small_objects(~d1, 100), 100)
99
e2 = morphology.remove_small_objects(~morphology.remove_small_objects(~d2, 100), 100)
100
101
print np.abs(E1 - e1).sum()
102
print np.abs(E2 - e2).sum()
103
104
# <codecell>
105
106
(A2.shape[0] * A2.shape[1])
107
108
# <codecell>
109
110
    # Gabor-filter sweep over scale and orientation on the downsampled mask Es.
    # NOTE(review): this cell is indented as though it once lived inside a
    # function or worker loop (it reads sys.argv[1] as a thread id below);
    # the enclosing definition is not visible in this file.
    pixelscales = np.arange(15, 55, 2)
    gaborscales = 4. / pixelscales # 2 to 20 pixels

    orientations = np.linspace(0, np.pi * 11./12., 12) # 0 to 180 degrees in 15 degree increments

    # Results array: one energy value per (orientation, scale) pair
    gabor = np.zeros((len(orientations), len(gaborscales)))

    # Perform Gabor filtering
    for i, iv in enumerate(orientations) :
        for j, jv in enumerate(gaborscales) :
            gaborReal, gaborImag = filter.gabor_filter(Es, jv, iv)
            # Energy = L2 norm of the complex filter response
            gabor[i, j] = np.sqrt(np.sum(np.abs(gaborReal) ** 2) + np.sum(np.abs(gaborImag) ** 2)) # Return energy
        print "Thread %s. Gabor filtering. Completion : %f" % (sys.argv[1], (i / float(len(orientations))))

    # Determine orientation-independent scale which fits best
    # (index into pixelscales/gaborscales; consumed by the next cell)
    optimalscale = np.argmax(np.sum(gabor, axis = 0))

    # At this scale, calculate directionality coefficient
    # (relative spread of energy across orientations, in [0, 1])
    g = gabor[:, optimalscale]
    directionality = (g.max() - g.min()) / g.max()
131
132
# <codecell>
133
134
    # Entropy and lacunarity of the mask E at the characteristic scale
    # found by the Gabor sweep. Results are appended to these lists.
    scaleent = []
    scaleentstd = []
    roylac = []

    # Characteristic scale (in pixels)
    s = pixelscales[optimalscale]

    # Generate a disk at this scale
    circle = morphology.disk(s)
    circlesize = circle.sum()

    # Convolve with image: Y[i, j] = sum of mask values inside the disk
    Y = sig.fftconvolve(E, circle, "valid")

    # Compute information entropy
    # px = fraction of "on" pixels within each disk neighbourhood
    px = Y.ravel() / circlesize
    py = 1. - px
    # Exclude degenerate probabilities (0 or 1) so the logs stay finite
    idx = np.logical_and(px > 1. / circlesize, px < 1.)
    entropy = - ( np.mean(px[idx] * np.log(px[idx])) + np.mean(py[idx] * np.log(py[idx])) )
    entropystd = np.std(px[idx] * np.log(px[idx]) + py[idx] * np.log(py[idx]))

    # Compute normalised lacunarity
    # NOTE(review): ly is computed but never used anywhere visible in this
    # file -- dead code or a remnant of a removed complement analysis.
    lx = np.var(Y) / (np.mean(Y) ** 2) + 1
    ly = np.var(1. - Y) / (np.mean(1. - Y) ** 2) + 1

    # Roy et al, J. Struct. Geol. 2010

    # Results
    roylac.append((lx - 1.) / (1./np.mean(E) - 1.))
    scaleent.append(entropy)
    scaleentstd.append(entropystd)
166
167
# <codecell>
168
169
    # Stain separation followed by two segmentation pipelines: "veins"
    # and "inflammation". All names here are module-level and are
    # consumed by the counting cell below.

    # 3x3 stain matrix (rows = stain colour vectors); its inverse is the
    # deconvolution matrix passed to separate_stains.
    qstain = np.array([[.26451728, .5205347, .81183386], [.9199094, .29797825, .25489032], [.28947765, .80015373, .5253158]])

    # Colour-deconvolve the quarter-resolution image into stain channels.
    deconv = ski.img_as_float(color.separate_stains(transform.rescale(A, 0.25), np.linalg.inv(qstain)))

    # Vein candidates, pass 1: channel ratio -> blur -> adaptive
    # threshold -> drop objects smaller than 60 px.
    subveins1 = \
    morphology.remove_small_objects(
        filter.threshold_adaptive(
            filter.gaussian_filter(
                deconv[:, :, 2] / deconv[:, :, 0],
                11),
            250, offset = -0.13),
        60)

    # Vein candidates, pass 2: same pipeline but with a 5-px maximum
    # filter (dilation) applied to the ratio image first.
    subveins2 = \
    morphology.remove_small_objects(
        filter.threshold_adaptive(
            filter.gaussian_filter(
                    maximum_filter(
                    deconv[:, :, 2] / deconv[:, :, 0],
                    5),
                11),
            250, offset = -0.13),
        60)

    # Merge the two passes, close gaps with a 25-px disk, fill holes,
    # drop objects under 250 px, then dilate by 55 px to build the final
    # vein mask.
    veins = \
    maximum_filter(
        morphology.remove_small_objects(
            binary_fill_holes(
                morphology.binary_closing(
                    np.logical_or(subveins1, subveins2),
                    morphology.disk(25)),
                ),
            250),
        55)

    # Inflammation: rescale stain channel 1 -> adaptive histogram
    # equalisation -> blur -> sigmoid contrast -> adaptive threshold ->
    # drop objects under 250 px.
    # NOTE(review): ntiles_y is the legacy keyword of early
    # equalize_adapthist versions -- confirm against the pinned skimage.
    rawinflammation = \
    morphology.remove_small_objects(
    filter.threshold_adaptive(
        exposure.adjust_sigmoid(
            filter.gaussian_filter(
                exposure.equalize_adapthist(
                    exposure.rescale_intensity(
                        deconv[:, :, 1],
                        out_range = (0, 1)),
                    ntiles_y = 1),
                    5),
            cutoff = 0.6),
            75, offset = -0.12),
        250)

    # Dilate the raw detection by 55 px for the overlap test below.
    inflammation = \
    maximum_filter(
        rawinflammation,
        55)
231
232
# <codecell>
233
234
    # Combine the two masks, label connected components, and classify
    # each component as inflammation when its inflammation content
    # dominates its vein content. Produces inflammationcount and
    # inflammationsize (per-component raw-inflammation pixel counts).
    total = veins + inflammation
    coloured = np.zeros_like(deconv)
    coloured[:, :, 1] = veins
    coloured[:, :, 2] = inflammation

    labelled, regions = measure.label(total, return_num = True)

    inflammationcount = 0
    inflammationsize = []

    # NOTE(review): if labels run 1..regions inclusive, range(1, regions)
    # skips the final component -- confirm against this skimage version's
    # measure.label contract before relying on the counts.
    for b in range(1, regions) :

        # Dominance test; the +1 guards against division by zero for
        # components with no vein pixels.
        if (inflammation * (labelled == b)).sum() / ((veins * (labelled == b)).sum() + 1) > 0.5 :
            inflammationcount += 1
            # BUGFIX: was (rawinflammation * labelled == b).sum(), which
            # parses as ((rawinflammation * labelled) == b) by operator
            # precedence and compares pixel products against the label id.
            # Parenthesise so the component mask is applied, matching the
            # dominance test above.
            inflammationsize.append((rawinflammation * (labelled == b)).sum())
253
254
# <codecell>
255
256
regions
257
258
# <codecell>
259
260
io.imshow(A)
261
262
# <codecell>
263
264
io.imshow(inflammation)
265
plt.scatter([qq[i].centroid[1] for i in range(regions-1)], [qq[i].centroid[0] for i in range(regions-1)])
266
267
# <codecell>
268
269
pairwise.min(axis=0).mean()
270