unet_blocks.py
import torch
import torch.nn as nn

from utils import init_weights
class DownConvBlock(nn.Module):
    """
    A block of three convolutional layers, each followed by a non-linear activation.
    Between blocks, an optional pooling operation downsamples the input.
    """
    def __init__(self, input_dim, output_dim, initializers, padding, pool=True):
        super(DownConvBlock, self).__init__()
        layers = []

        # Downsample by a factor of 2 before the convolutions (skipped at the
        # top of the encoder and when used inside UpConvBlock).
        if pool:
            layers.append(nn.AvgPool2d(kernel_size=2, stride=2, padding=0, ceil_mode=True))

        # Three 3x3 convolutions, each followed by an in-place ReLU.
        layers.append(nn.Conv2d(input_dim, output_dim, kernel_size=3, stride=1, padding=int(padding)))
        layers.append(nn.ReLU(inplace=True))
        layers.append(nn.Conv2d(output_dim, output_dim, kernel_size=3, stride=1, padding=int(padding)))
        layers.append(nn.ReLU(inplace=True))
        layers.append(nn.Conv2d(output_dim, output_dim, kernel_size=3, stride=1, padding=int(padding)))
        layers.append(nn.ReLU(inplace=True))

        self.layers = nn.Sequential(*layers)
        self.layers.apply(init_weights)

    def forward(self, patch):
        return self.layers(patch)
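
# A minimal smoke test for DownConvBlock -- a sketch added for illustration, not
# part of the original module; the function name and shapes are hypothetical.
# With pool=True the AvgPool halves the spatial size and, with padding=1, the
# three 3x3 convolutions preserve it, so a 64x64 input comes out as 32x32. The
# initializers argument is accepted but unused by this block, so None is passed.
def _demo_down_conv_block():
    block = DownConvBlock(input_dim=3, output_dim=32, initializers=None, padding=1, pool=True)
    x = torch.randn(1, 3, 64, 64)
    out = block(x)
    assert out.shape == (1, 32, 32, 32)
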
class UpConvBlock(nn.Module):
    """
    A block consisting of an upsampling layer followed by a convolutional layer
    to reduce the number of channels, and then a DownConvBlock (without pooling).
    If bilinear is set to False, a transposed convolution is used instead of
    bilinear upsampling.
    """
    def __init__(self, input_dim, output_dim, initializers, padding, bilinear=True):
        super(UpConvBlock, self).__init__()
        self.bilinear = bilinear

        if not self.bilinear:
            self.upconv_layer = nn.ConvTranspose2d(input_dim, output_dim, kernel_size=2, stride=2)
            self.upconv_layer.apply(init_weights)

        self.conv_block = DownConvBlock(input_dim, output_dim, initializers, padding, pool=False)

    def forward(self, x, bridge):
        # Double the spatial resolution, either by interpolation (channel count
        # unchanged) or by the learned transposed convolution.
        if self.bilinear:
            up = nn.functional.interpolate(x, mode='bilinear', scale_factor=2, align_corners=True)
        else:
            up = self.upconv_layer(x)

        # The upsampled features must match the skip connection spatially before
        # being concatenated along the channel dimension.
        assert up.shape[3] == bridge.shape[3]
        out = torch.cat([up, bridge], 1)
        out = self.conv_block(out)

        return out
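
# A minimal sketch of how the two blocks compose in one decoder step -- added
# for illustration; the function name and shapes are hypothetical. With
# bilinear=True the channels of x are not reduced before concatenation, so
# input_dim must equal the channel count of x plus that of the skip ("bridge")
# tensor (here 64 + 32 = 96).
def _demo_up_conv_block():
    up_block = UpConvBlock(input_dim=96, output_dim=32, initializers=None, padding=1, bilinear=True)
    x = torch.randn(1, 64, 16, 16)        # coarse decoder features
    bridge = torch.randn(1, 32, 32, 32)   # skip connection from the encoder
    out = up_block(x, bridge)
    assert out.shape == (1, 32, 32, 32)


if __name__ == "__main__":
    _demo_down_conv_block()
    _demo_up_conv_block()
    print("unet_blocks demos passed")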