Diff of /darkflow/utils/process.py [000000] .. [d34869]

"""
WARNING: spaghetti code.
"""

import numpy as np
import pickle
import os


def parser(model):
    """
    Read the .cfg file to extract layers into `layers`
    as well as model-specific parameters into `meta`
    """

    def _parse(l, i=1):
        return l.split('=')[i].strip()
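    # A darknet .cfg file is INI-like: section headers such as [net] or
    # [convolutional] open a new layer, followed by key = value options;
    # '#' starts a comment. The loop below collects each section into a
    # dict of its options, keyed by option name.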
    with open(model, 'rb') as f:
        lines = f.readlines()

    lines = [line.decode() for line in lines]

    meta = dict()
    layers = list()  # will contain each layer's info
    h, w, c = 0, 0, 0
    layer = dict()
    for line in lines:
        line = line.strip()
        line = line.split('#')[0]
        if '[' in line:
            if layer != dict():
                if layer['type'] == '[net]':
                    h = layer['height']
                    w = layer['width']
                    c = layer['channels']
                    meta['net'] = layer
                else:
                    if layer['type'] == '[crop]':
                        h = layer['crop_height']
                        w = layer['crop_width']
                    layers += [layer]
            layer = {'type': line}
        else:
            try:
                i = float(_parse(line))
                if i == int(i): i = int(i)
                layer[line.split('=')[0].strip()] = i
            except:
                try:
                    key = _parse(line, 0)
                    val = _parse(line, 1)
                    layer[key] = val
                except:
                    pass  # banana ninja yadayada

    meta.update(layer)  # last layer contains meta info
    if 'anchors' in meta:
        splits = meta['anchors'].split(',')
        anchors = [float(x.strip()) for x in splits]
        meta['anchors'] = anchors
    meta['model'] = model  # path to cfg, not model name
    meta['inp_size'] = [h, w, c]
    return layers, meta

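
# parser() returns (layers, meta): `layers` is a list of dicts in cfg order,
# e.g. {'type': '[convolutional]', 'filters': 16, 'size': 3, ...} (values
# illustrative), and `meta` carries the options of the trailing section plus
# 'net', 'anchors' (if present), 'model' and 'inp_size'.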
def cfg_yielder(model, binary):
    """
    Yield each layer's information to initialize the corresponding `layer` object.
    """
    layers, meta = parser(model)
    yield meta
    h, w, c = meta['inp_size']
    l = w * h * c

    # Start yielding
    flat = False  # flag for 1st dense layer
    conv = '.conv.' in model
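    # w, h, c track the running feature-map shape and l its flattened size;
    # they are updated after each layer below. When the cfg path contains
    # '.conv.', batch normalization is forced on for every convolutional
    # layer (see how `conv` feeds batch_norm below).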
    for i, d in enumerate(layers):
        # -----------------------------------------------------
        if d['type'] == '[crop]':
            yield ['crop', i]
        # -----------------------------------------------------
        elif d['type'] == '[local]':
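            # Locally connected layer (convolution with unshared weights).
            # With pad = 1 the output size is (dim - 1) // stride + 1;
            # with pad = 0 it is (dim - size) // stride + 1, as computed
            # for w_/h_ below.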
            n = d.get('filters', 1)
            size = d.get('size', 1)
            stride = d.get('stride', 1)
            pad = d.get('pad', 0)
            activation = d.get('activation', 'logistic')
            w_ = (w - 1 - (1 - pad) * (size - 1)) // stride + 1
            h_ = (h - 1 - (1 - pad) * (size - 1)) // stride + 1
            yield ['local', i, size, c, n, stride,
                   pad, w_, h_, activation]
            if activation != 'linear': yield [activation, i]
            w, h, c = w_, h_, n
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[convolutional]':
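            # Convolution: pad = 1 selects darknet's default padding of
            # size // 2; the output size is (dim + 2 * padding - size)
            # // stride + 1, computed below after the layer is yielded.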
            n = d.get('filters', 1)
            size = d.get('size', 1)
            stride = d.get('stride', 1)
            pad = d.get('pad', 0)
            padding = d.get('padding', 0)
            if pad: padding = size // 2
            activation = d.get('activation', 'logistic')
            batch_norm = d.get('batch_normalize', 0) or conv
            yield ['convolutional', i, size, c, n,
                   stride, padding, batch_norm,
                   activation]
            if activation != 'linear': yield [activation, i]
            w_ = (w + 2 * padding - size) // stride + 1
            h_ = (h + 2 * padding - size) // stride + 1
            w, h, c = w_, h_, n
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[maxpool]':
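            # Max pooling: the defaults here are size = stride and
            # padding = (size - 1) // 2; the pooled output size is
            # (dim + 2 * padding) // stride.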
            stride = d.get('stride', 1)
            size = d.get('size', stride)
            padding = d.get('padding', (size - 1) // 2)
            yield ['maxpool', i, size, stride, padding]
            w_ = (w + 2 * padding) // stride
            h_ = (h + 2 * padding) // stride
            w, h = w_, h_
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[avgpool]':
            flat = True
            l = c
            yield ['avgpool', i]
        # -----------------------------------------------------
        elif d['type'] == '[softmax]':
            yield ['softmax', i, d['groups']]
        # -----------------------------------------------------
        elif d['type'] == '[connected]':
            if not flat:
                yield ['flatten', i]
                flat = True
            activation = d.get('activation', 'logistic')
            yield ['connected', i, l, d['output'], activation]
            if activation != 'linear': yield [activation, i]
            l = d['output']
        # -----------------------------------------------------
        elif d['type'] == '[dropout]':
            yield ['dropout', i, d['probability']]
        # -----------------------------------------------------
        elif d['type'] == '[select]':
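            # [select] (darkflow-specific): a dense layer that keeps only a
            # subset of a previously trained layer's outputs. 'keep' has the
            # form "idx1,idx2,.../num_classes"; the kept indices are repeated
            # for each bin, shifted by the class count, before being passed
            # to the layer together with train_from = classes * bins.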
            if not flat:
                yield ['flatten', i]
                flat = True
            inp = d.get('input', None)
            if type(inp) is str:
                file = inp.split(',')[0]
                layer_num = int(inp.split(',')[1])
                with open(file, 'rb') as f:
                    profiles = pickle.load(f, encoding='latin1')[0]
                layer = profiles[layer_num]
            else:
                layer = inp
            activation = d.get('activation', 'logistic')
            d['keep'] = d['keep'].split('/')
            classes = int(d['keep'][-1])
            keep = [int(c) for c in d['keep'][0].split(',')]
            keep_n = len(keep)
            train_from = classes * d['bins']
            for count in range(d['bins'] - 1):
                for num in keep[-keep_n:]:
                    keep += [num + classes]
            k = 1
            while layers[i - k]['type'] not in ['[connected]', '[extract]']:
                k += 1
                if i - k < 0:
                    break
            if i - k < 0:
                l_ = l
            elif layers[i - k]['type'] == '[connected]':
                l_ = layers[i - k]['output']
            else:
                l_ = layers[i - k].get('old', [l])[-1]
            yield ['select', i, l_, d['old_output'],
                   activation, layer, d['output'],
                   keep, train_from]
            if activation != 'linear': yield [activation, i]
            l = d['output']
        # -----------------------------------------------------
        elif d['type'] == '[conv-select]':
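            # [conv-select] (darkflow-specific): a convolution whose filters
            # are grouped into bins of (classes + 5) channels, the YOLO
            # layout of 4 box coordinates + 1 objectness score + per-class
            # scores. keep_idx retains the first 5 channels of every bin
            # plus only the requested class channels.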
            n = d.get('filters', 1)
            size = d.get('size', 1)
            stride = d.get('stride', 1)
            pad = d.get('pad', 0)
            padding = d.get('padding', 0)
            if pad: padding = size // 2
            activation = d.get('activation', 'logistic')
            batch_norm = d.get('batch_normalize', 0) or conv
            d['keep'] = d['keep'].split('/')
            classes = int(d['keep'][-1])
            keep = [int(x) for x in d['keep'][0].split(',')]

            segment = classes + 5
            assert n % segment == 0, \
                'conv-select: segment failed'
            bins = n // segment
            keep_idx = list()
            for j in range(bins):
                offset = j * segment
                for k in range(5):
                    keep_idx += [offset + k]
                for k in keep:
                    keep_idx += [offset + 5 + k]
            w_ = (w + 2 * padding - size) // stride + 1
            h_ = (h + 2 * padding - size) // stride + 1
            c_ = len(keep_idx)
            yield ['conv-select', i, size, c, n,
                   stride, padding, batch_norm,
                   activation, keep_idx, c_]
            w, h, c = w_, h_, c_
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[conv-extract]':
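            # [conv-extract] (darkflow-specific): a convolution restricted to
            # the input/output channel subsets stored in a pickled profile
            # file; the output channel count becomes len(out_layer).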
            file = d['profile']
            with open(file, 'rb') as f:
                profiles = pickle.load(f, encoding='latin1')[0]
            inp_layer = None
            inp = d['input']
            out = d['output']
            if inp >= 0:
                inp_layer = profiles[inp]
            if inp_layer is not None:
                assert len(inp_layer) == c, \
                    'Conv-extract does not match input dimension'
            out_layer = profiles[out]

            n = d.get('filters', 1)
            size = d.get('size', 1)
            stride = d.get('stride', 1)
            pad = d.get('pad', 0)
            padding = d.get('padding', 0)
            if pad: padding = size // 2
            activation = d.get('activation', 'logistic')
            batch_norm = d.get('batch_normalize', 0) or conv

            k = 1
            find = ['[convolutional]', '[conv-extract]']
            while layers[i - k]['type'] not in find:
                k += 1
                if i - k < 0: break
            if i - k >= 0:
                previous_layer = layers[i - k]
                c_ = previous_layer['filters']
            else:
                c_ = c

            yield ['conv-extract', i, size, c_, n,
                   stride, padding, batch_norm,
                   activation, inp_layer, out_layer]
            if activation != 'linear': yield [activation, i]
            w_ = (w + 2 * padding - size) // stride + 1
            h_ = (h + 2 * padding - size) // stride + 1
            w, h, c = w_, h_, len(out_layer)
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[extract]':
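            # [extract] (darkflow-specific): the dense counterpart of
            # conv-extract. Input/output index subsets come from a pickled
            # profile; when 'old' describes a conv volume (h, w, c, n), the
            # kept channel indices are expanded to flattened feature indices
            # before the check against the current flattened size l.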
            if not flat:
                yield ['flatten', i]
                flat = True
            activation = d.get('activation', 'logistic')
            file = d['profile']
            with open(file, 'rb') as f:
                profiles = pickle.load(f, encoding='latin1')[0]
            inp_layer = None
            inp = d['input']
            out = d['output']
            if inp >= 0:
                inp_layer = profiles[inp]
            out_layer = profiles[out]
            old = d['old']
            old = [int(x) for x in old.split(',')]
            if inp_layer is not None:
                if len(old) > 2:
                    h_, w_, c_, n_ = old
                    new_inp = list()
                    for p in range(c_):
                        for q in range(h_):
                            for r in range(w_):
                                if p not in inp_layer:
                                    continue
                                new_inp += [r + w * (q + h * p)]
                    inp_layer = new_inp
                    old = [h_ * w_ * c_, n_]
                assert len(inp_layer) == l, \
                    'Extract does not match input dimension'
            d['old'] = old
            yield ['extract', i] + old + [activation] + [inp_layer, out_layer]
            if activation != 'linear': yield [activation, i]
            l = len(out_layer)
        # -----------------------------------------------------
        elif d['type'] == '[route]':  # add new layer here
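            # [route] concatenates the outputs of earlier layers along the
            # channel axis; negative indices are relative to this layer, and
            # all routed layers must share the same spatial size.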
            routes = d['layers']
            if type(routes) is int:
                routes = [routes]
            else:
                routes = [int(x.strip()) for x in routes.split(',')]
            routes = [i + x if x < 0 else x for x in routes]
            for j, x in enumerate(routes):
                lx = layers[x]
                xtype = lx['type']
                _size = lx['_size'][:3]
                if j == 0:
                    h, w, c = _size
                else:
                    h_, w_, c_ = _size
                    assert w_ == w and h_ == h, \
                        'Routing incompatible conv sizes'
                    c += c_
            yield ['route', i, routes]
            l = w * h * c
        # -----------------------------------------------------
        elif d['type'] == '[reorg]':
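            # [reorg] is darknet's space-to-depth shuffle: each spatial
            # dimension shrinks by `stride` while the channel count grows
            # by stride ** 2, so the total activation volume is preserved.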
            stride = d.get('stride', 1)
            yield ['reorg', i, stride]
            w = w // stride
            h = h // stride
            c = c * (stride ** 2)
            l = w * h * c
        # -----------------------------------------------------
        else:
            exit('Layer {} not implemented'.format(d['type']))

        d['_size'] = [h, w, c, l, flat]

    if not flat:
        meta['out_size'] = [h, w, c]
    else:
        meta['out_size'] = l
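

# Minimal smoke-test sketch (not part of darkflow's API): walk the layer
# descriptions produced by cfg_yielder for a given cfg file. The default
# path below is a placeholder; point it at any darknet .cfg to try it.
if __name__ == '__main__':
    import sys

    cfg_path = sys.argv[1] if len(sys.argv) > 1 else 'cfg/tiny-yolo.cfg'  # placeholder path
    stream = cfg_yielder(cfg_path, binary=None)
    meta_info = next(stream)  # first item yielded is the meta dict
    print('input size :', meta_info['inp_size'])
    for info in stream:
        # each subsequent item is a list: [layer_type, index, *parameters]
        print(info[1], info[0], info[2:])
    # 'out_size' is filled into the same meta dict once the generator finishes
    print('output size:', meta_info['out_size'])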