darkflow/net/ops/simple.py
import tf_slim as slim
from .baseop import BaseOp
import tensorflow as tf
from distutils.version import StrictVersion


class route(BaseOp):
    def forward(self):
        routes = self.lay.routes
        routes_out = list()
        for r in routes:
            this = self.inp
            # walk backwards through the graph until the routed layer is found
            while this.lay.number != r:
                this = this.inp
                assert this is not None, \
                    'Routing to non-existent layer {}'.format(r)
            routes_out += [this.out]
        # concatenate the routed outputs along the channel axis (NHWC)
        self.out = tf.concat(routes_out, 3)

    def speak(self):
        msg = 'concat {}'
        return msg.format(self.lay.routes)


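# Rough shape sketch for route.forward above (illustrative values, not taken
# from the source): with NHWC tensors, concatenating routed outputs of shapes
# (N, 13, 13, 256) and (N, 13, 13, 1024) along axis 3 gives (N, 13, 13, 1280).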
class connected(BaseOp):
    def forward(self):
        # fully connected layer: out = inp @ weights + biases
        self.out = tf.nn.xw_plus_b(
            self.inp.out,
            self.lay.w['weights'],
            self.lay.w['biases'],
            name=self.scope)

    def speak(self):
        layer = self.lay
        args = [layer.inp, layer.out]
        args += [layer.activation]
        msg = 'full {} x {}  {}'
        return msg.format(*args)


class select(connected):
    """a weird connected layer"""

    def speak(self):
        layer = self.lay
        args = [layer.inp, layer.out]
        args += [layer.activation]
        msg = 'sele {} x {}  {}'
        return msg.format(*args)


class extract(connected):
    """a weird connected layer"""

    def speak(self):
        layer = self.lay
        args = [len(layer.inp), len(layer.out)]
        args += [layer.activation]
        msg = 'extr {} x {}  {}'
        return msg.format(*args)


class flatten(BaseOp):
    def forward(self):
        # flatten in channels-first order: transpose NHWC -> NCHW first
        temp = tf.transpose(
            self.inp.out, [0, 3, 1, 2])
        self.out = slim.flatten(
            temp, scope=self.scope)

    def speak(self): return 'flat'


class softmax(BaseOp):
    def forward(self):
        self.out = tf.nn.softmax(self.inp.out)

    def speak(self): return 'softmax()'


class avgpool(BaseOp):
    def forward(self):
        # global average pooling over the spatial dimensions (H, W)
        self.out = tf.reduce_mean(
            self.inp.out, [1, 2],
            name=self.scope
        )

    def speak(self): return 'avgpool()'


class dropout(BaseOp):
    def forward(self):
        # default to 1.0 when no dropout probability is configured
        if self.lay.h['pdrop'] is None:
            self.lay.h['pdrop'] = 1.0
        self.out = tf.nn.dropout(
            self.inp.out,
            self.lay.h['pdrop'],
            name=self.scope
        )

    def speak(self): return 'drop'


class crop(BaseOp):
    def forward(self):
        # rescale inputs from [0, 1] to [-1, 1]
        self.out = self.inp.out * 2. - 1.

    def speak(self):
        return 'scale to (-1, 1)'


class maxpool(BaseOp):
    def forward(self):
        self.out = tf.nn.max_pool(
            self.inp.out, padding='SAME',
            ksize=[1] + [self.lay.ksize] * 2 + [1],
            strides=[1] + [self.lay.stride] * 2 + [1],
            name=self.scope
        )

    def speak(self):
        l = self.lay
        return 'maxp {}x{}p{}_{}'.format(
            l.ksize, l.ksize, l.pad, l.stride)


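# Illustrative note for maxpool.forward above (assumed values): with
# self.lay.ksize = 2 and self.lay.stride = 2 the call becomes
#   tf.nn.max_pool(x, padding='SAME', ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1])
# which downsamples the spatial dimensions of an NHWC input by 2.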
class leaky(BaseOp):
    def forward(self):
        # leaky ReLU with slope 0.1: max(0.1 * x, x)
        self.out = tf.maximum(
            .1 * self.inp.out,
            self.inp.out,
            name=self.scope
        )

    def verbalise(self): pass


class identity(BaseOp):
    def __init__(self, inp):
        # wrap a plain tensor as an op: no producing layer, output is the tensor itself
        self.inp = None
        self.out = inp
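

# Minimal usage sketch (assumed example, not part of the original module):
# the element-wise math used by crop and leaky can be checked on a bare tensor.
#   x = tf.constant([0.0, 0.25, 1.0])
#   x * 2. - 1.              # crop: maps [0, 1] values to [-1, 1]
#   tf.maximum(.1 * x, x)    # leaky: leaky ReLU with slope 0.1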