model-description/architecture-concise.txt
=========================================================================================================
Layer (type:depth-idx)                                  Input Shape               Output Shape
=========================================================================================================
Unet                                                    --                        --
├─EfficientNetEncoder-B5: 1-1                           [1, 3, 512, 512]          [1, 3, 512, 512]
│    └─Conv2d: 2-2                                      [1, 3, 512, 512]          [1, 48, 256, 256]
│    └─BatchNorm2d: 2-3                                 [1, 48, 256, 256]         [1, 48, 256, 256]
│    └─Swish: 2-4                                      [1, 48, 256, 256]         [1, 48, 256, 256]
│    └─Sequential: 2                                    --                        --
│    │    └─Sequential: 3-1                             [1, 48, 256, 256]         [1, 24, 256, 256]
│    │    │    └─DepthwiseSepConv: 4-1 -> 4-3           [1, 48, 256, 256]         [1, 24, 256, 256]
│    │    └─Sequential: 3-2                             [1, 24, 256, 256]         [1, 40, 128, 128]
│    │    │    └─InvertedResidual: 4-4 -> 4-8           [1, 24, 256, 256]         [1, 40, 128, 128]
│    │    └─Sequential: 3-3                             [1, 40, 128, 128]         [1, 64, 64, 64]
│    │    │    └─InvertedResidual: 4-9 -> 4-13          [1, 40, 128, 128]         [1, 64, 64, 64]
│    │    └─Sequential: 3-4                             [1, 64, 64, 64]           [1, 128, 32, 32]
│    │    │    └─InvertedResidual: 4-14 -> 4-20         [1, 64, 64, 64]           [1, 128, 32, 32]
│    │    └─Sequential: 3-5                             [1, 128, 32, 32]          [1, 176, 32, 32]
│    │    │    └─InvertedResidual: 4-21 -> 4-27         [1, 128, 32, 32]          [1, 176, 32, 32]
│    │    └─Sequential: 3-6                             [1, 176, 32, 32]          [1, 304, 16, 16]
│    │    │    └─InvertedResidual: 4-28 -> 4-36         [1, 176, 32, 32]          [1, 304, 16, 16]
│    │    └─Sequential: 3-7                             [1, 304, 16, 16]          [1, 512, 16, 16]
│    │    │    └─InvertedResidual: 4-37 -> 4-39         [1, 304, 16, 16]          [1, 512, 16, 16]
├─UnetDecoder: 1-2                                      [1, 3, 512, 512]          [1, 32, 512, 512]
│    └─Identity: 2-5                                    [1, 512, 16, 16]          [1, 512, 16, 16]
│    └─ModuleList: 2-1                                  --                        --
│    │    └─DecoderBlock: 3-8                           [1, 512, 16, 16]          [1, 512, 32, 32]
│    │    └─DecoderBlock: 3-9                           [1, 512, 32, 32]          [1, 256, 64, 64]
│    │    └─DecoderBlock: 3-10                          [1, 256, 64, 64]          [1, 128, 128, 128]
│    │    └─DecoderBlock: 3-11                          [1, 128, 128, 128]        [1, 64, 256, 256]
│    │    └─DecoderBlock: 3-12                          [1, 64, 256, 256]         [1, 32, 512, 512]
├─SegmentationHead: 1-3                                 [1, 32, 512, 512]         [1, 1, 512, 512]
│    └─Conv2d: 2-6                                      [1, 32, 512, 512]         [1, 1, 512, 512]
│    └─Identity: 2-7                                    [1, 1, 512, 512]          [1, 1, 512, 512]
│    └─Activation: 2-8                                  [1, 1, 512, 512]          [1, 1, 512, 512]
│    │    └─Identity: 3-13                              [1, 1, 512, 512]          [1, 1, 512, 512]
=========================================================================================================

====================================================================================================
Layer (type:depth-idx)         Abstracted Input Shape          Abstracted Output Shape
====================================================================================================

DepthwiseSeparableConv: 1-1    [1, C, H, W]                    [1, C, H, W]
   └─Conv2d: 2-1               [1, C, H, W]                    [1, C, H, W]
   └─BatchNorm2d: 2-2          [1, C, H, W]                    [1, C, H, W]
   └─Swish: 2-3                [1, C, H, W]                    [1, C, H, W]
   └─SqueezeExcite: 2-4        [1, C, H, W]                    [1, C, H, W]
   └─Conv2d: 2-5               [1, C, H, W]                    [1, C, H, W]
   └─BatchNorm2d: 2-6          [1, C, H, W]                    [1, C, H, W]
   └─Identity: 2-7             [1, C, H, W]                    [1, C, H, W]

InvertedResidual: 1-1          [1, C2, H2, W2]                 [1, C2, H2, W2]
   └─Conv2d: 2-1               [1, C2, H2, W2]                 [1, C2 x 6, H2, W2]
   └─BatchNorm2d: 2-2          [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─Swish: 2-3                [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─Conv2d: 2-4               [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─BatchNorm2d: 2-5          [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─Swish: 2-6                [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─SqueezeExcite: 2-7        [1, C2 x 6, H2, W2]             [1, C2 x 6, H2, W2]
   └─Conv2d: 2-8               [1, C2 x 6, H2, W2]             [1, C2, H2, W2]
   └─BatchNorm2d: 2-9          [1, C2, H2, W2]                 [1, C2, H2, W2]

DecoderBlock: 1-1              [1, C3 x 8, H3, W3]             [1, C3 x 4, H3 x 2,  W3 x 2]
   └─Attention: 2-1            [1, C3 x 9, H3 x 2,  W3 x 2]    [1, C3 x 9, H3 x 2,  W3 x 2]
   │    └─Identity: 3-1        [1, C3 x 9, H3 x 2,  W3 x 2]    [1, C3 x 9, H3 x 2,  W3 x 2]
   └─Conv2dReLU: 2-2           [1, C3 x 9, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─Conv2d: 3-1          [1, C3 x 9, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─BatchNorm2d: 3-2     [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─ReLU: 3-3            [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   └─Conv2dReLU: 2-3           [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─Conv2d: 3-1          [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─BatchNorm2d: 3-2     [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─ReLU: 3-3            [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   └─Attention: 2-4            [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
   │    └─Identity: 3-1        [1, C3 x 4, H3 x 2,  W3 x 2]    [1, C3 x 4, H3 x 2,  W3 x 2]
====================================================================================================