Diff of /UNET.py [000000] .. [c621c3]

Switch to unified view

a b/UNET.py
1
import torch
2
import torch.nn as nn
3
import torchvision.transforms.functional as TF
4
5
class DoubleConvolution(nn.Module):
    """Two consecutive (3x3 conv -> batch-norm -> ReLU) stages.

    Unlike the original U-Net paper, the convolutions here are padded
    (padding=1), so the spatial size of the input is preserved. The conv
    layers use ``bias=False`` because the following BatchNorm2d supplies
    its own affine shift, making a conv bias redundant.
    """

    def __init__(self, in_channels, out_channels):
        super(DoubleConvolution, self).__init__()
        stages = []
        channels = in_channels
        # Build the two identical conv->BN->ReLU stages; only the first
        # stage changes the channel count.
        for _ in range(2):
            stages.append(nn.Conv2d(channels, out_channels, 3, 1, 1, bias=False))
            stages.append(nn.BatchNorm2d(out_channels))
            stages.append(nn.ReLU(inplace=True))
            channels = out_channels
        self.conv = nn.Sequential(*stages)

    def forward(self, x):
        """Apply both conv stages to ``x`` (shape preserved except channels)."""
        return self.conv(x)
20
21
class UNET(nn.Module):
22
    def __init__(self, in_channels=3, out_channels=1, features = [64, 128, 256, 512]):
23
        super(UNET, self).__init__()
24
        self.downs = nn.ModuleList()
25
        self.ups = nn.ModuleList()
26
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
27
28
29
        for feature in features:
30
            self.downs.append(DoubleConvolution(in_channels, feature))
31
            in_channels = feature
32
33
        for feature in reversed(features):
34
            self.ups.append(
35
                nn.ConvTranspose2d(feature*2, feature, kernel_size=2, stride=2)
36
            )
37
            self.ups.append(DoubleConvolution(feature*2, feature))
38
39
        self.bottleneck = DoubleConvolution(features[-1], features[-1]*2)
40
        self.final_conv = nn.Conv2d(features[0], out_channels, kernel_size=1)
41
42
    def forward(self, x):
43
        skip_connections = []
44
        for down in self.downs:
45
            x = down(x)
46
            skip_connections.append(x)
47
            x = self.pool(x)
48
        x = self.bottleneck(x)
49
        skip_connections = skip_connections[::-1]
50
51
        for idx in range(0, len(self.ups), 2):
52
            x = self.ups[idx](x)
53
            skip_connection = skip_connections[idx//2]
54
55
            if x.shape != skip_connection.shape:
56
                x = TF.resize(x, size=skip_connection.shape[2:])
57
58
            concat_skip = torch.cat((skip_connection, x), dim=1)
59
            x = self.ups[idx+1](concat_skip)
60
61
        return self.final_conv(x)