
darkflow/dark/convolution.py
from .layer import Layer
import numpy as np

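# Locally-connected layer (darknet's "local" layer): each of the
# h_ x w_ output positions gets its own kernel, hence the extra
# h_ * w_ leading dimension in the weight shapes below.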
class local_layer(Layer):
    def setup(self, ksize, c, n, stride,
              pad, w_, h_, activation):
        self.pad = pad * int(ksize / 2)
        self.activation = activation
        self.stride = stride
        self.ksize = ksize
        self.h_out = h_
        self.w_out = w_

        self.dnshape = [h_ * w_, n, c, ksize, ksize]
        self.wshape = dict({
            'biases': [h_ * w_ * n],
            'kernels': [h_ * w_, ksize, ksize, c, n]
        })

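    # Reorder kernels loaded from a darknet checkpoint: dnshape
    # (positions, n, c, k, k) is transposed to (positions, k, k, c, n),
    # matching wshape['kernels'] above.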
    def finalize(self, _):
        weights = self.w['kernels']
        if weights is None: return
        weights = weights.reshape(self.dnshape)
        weights = weights.transpose([0, 3, 4, 2, 1])
        self.w['kernels'] = weights


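# Convolutional layer that keeps only the input channels listed in `inp`
# and the output filters listed in `out`, slicing them out of a larger
# layer's weights in recollect().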
class conv_extract_layer(Layer):
    def setup(self, ksize, c, n, stride,
              pad, batch_norm, activation,
              inp, out):
        if inp is None: inp = range(c)
        self.activation = activation
        self.batch_norm = batch_norm
        self.stride = stride
        self.ksize = ksize
        self.pad = pad
        self.inp = inp
        self.out = out
        self.wshape = dict({
            'biases': [len(out)],
            'kernel': [ksize, ksize, len(inp), len(out)]
        })

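    # Expose this layer to the rest of the graph as a plain convolutional
    # layer: the signature drops the trailing inp/out arguments.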
    @property
    def signature(self):
        sig = ['convolutional']
        sig += self._signature[1:-2]
        return sig

    def present(self):
        args = self.signature
        self.presenter = convolutional_layer(*args)

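    # Slice the source layer's weights: axis 2 of the kernel holds input
    # channels, axis 3 holds output filters.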
    def recollect(self, w):
        if w is None:
            self.w = w
            return
        k = w['kernel']
        b = w['biases']
        k = np.take(k, self.inp, 2)
        k = np.take(k, self.out, 3)
        b = np.take(b, self.out)
        assert1 = k.shape == tuple(self.wshape['kernel'])
        assert2 = b.shape == tuple(self.wshape['biases'])
        assert assert1 and assert2, \
            'Dimension not matching in {} recollect'.format(
                self._signature)
        self.w['kernel'] = k
        self.w['biases'] = b


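# Convolutional layer that keeps only the output filters listed in
# keep_idx (real_n of them), along with their batch-norm statistics.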
class conv_select_layer(Layer):
    def setup(self, ksize, c, n, stride,
              pad, batch_norm, activation,
              keep_idx, real_n):
        self.batch_norm = bool(batch_norm)
        self.activation = activation
        self.keep_idx = keep_idx
        self.stride = stride
        self.ksize = ksize
        self.pad = pad
        self.wshape = dict({
            'biases': [real_n],
            'kernel': [ksize, ksize, c, real_n]
        })
        if self.batch_norm:
            self.wshape.update({
                'moving_variance': [real_n],
                'moving_mean': [real_n],
                'gamma': [real_n]
            })
            self.h['is_training'] = {
                'shape': (),
                'feed': True,
                'dfault': False
            }

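    # As in conv_extract_layer: present this layer as a plain convolutional
    # layer, with keep_idx/real_n dropped from the signature.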
    @property
    def signature(self):
        sig = ['convolutional']
        sig += self._signature[1:-2]
        return sig

    def present(self):
        args = self.signature
        self.presenter = convolutional_layer(*args)

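    # Take the kept filters (axis 3 of the kernel) and, when batch
    # normalization is on, the matching statistics.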
    def recollect(self, w):
        if w is None:
            self.w = w
            return
        idx = self.keep_idx
        k = w['kernel']
        b = w['biases']
        self.w['kernel'] = np.take(k, idx, 3)
        self.w['biases'] = np.take(b, idx)
        if self.batch_norm:
            m = w['moving_mean']
            v = w['moving_variance']
            g = w['gamma']
            self.w['moving_mean'] = np.take(m, idx)
            self.w['moving_variance'] = np.take(v, idx)
            self.w['gamma'] = np.take(g, idx)


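# Standard convolutional layer. wshape follows TF's (k, k, c, n) kernel
# layout; dnshape is darknet's (n, c, k, k) layout, converted in finalize().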
class convolutional_layer(Layer):
    def setup(self, ksize, c, n, stride,
              pad, batch_norm, activation):
        self.batch_norm = bool(batch_norm)
        self.activation = activation
        self.stride = stride
        self.ksize = ksize
        self.pad = pad
        self.dnshape = [n, c, ksize, ksize]  # darknet shape
        self.wshape = dict({
            'biases': [n],
            'kernel': [ksize, ksize, c, n]
        })
        if self.batch_norm:
            self.wshape.update({
                'moving_variance': [n],
                'moving_mean': [n],
                'gamma': [n]
            })
            self.h['is_training'] = {
                'feed': True,
                'dfault': False,
                'shape': ()
            }

    def finalize(self, _):
        """Reorder darknet (n, c, k, k) weights into TF (k, k, c, n) layout."""
        kernel = self.w['kernel']
        if kernel is None: return
        kernel = kernel.reshape(self.dnshape)
        kernel = kernel.transpose([2, 3, 1, 0])
        self.w['kernel'] = kernel
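
A minimal standalone sketch of what convolutional_layer.finalize does to weights loaded from a darknet checkpoint; the names n, c, k and the toy array below are illustrative only and not part of the file above:

import numpy as np

n, c, k = 4, 3, 3                           # filters, input channels, kernel size
flat = np.arange(n * c * k * k, dtype=np.float32)
dn = flat.reshape([n, c, k, k])             # darknet stores kernels as (n, c, k, k)
tf_kernel = dn.transpose([2, 3, 1, 0])      # finalize() turns this into (k, k, c, n)
assert tf_kernel.shape == (k, k, c, n)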