-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmsnet.py
72 lines (62 loc) · 2.21 KB
/
msnet.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
# MSNEt from https://github.com/bill317996/Melody-extraction-with-melodic-segnet/blob/master/MSnet/model.py
# We only use the vocal version
import torch
import torch.nn as nn
import torch.nn.functional as F
class MSnet(nn.Module):
    """Melodic Segnet (vocal version) for melody extraction.

    Encoder-decoder CNN operating on a (batch, 3, freq, time) input.
    The encoder has three BatchNorm+Conv+SELU stages, each followed by
    max pooling along the frequency axis only (factors 4, 3, 6 — so the
    input frequency dimension must be divisible by 72). The decoder
    mirrors the encoder, reusing the pooling indices via MaxUnpool2d.
    A "bottom" branch collapses the bottleneck's frequency axis into a
    single non-melody bin that is concatenated in front of the decoder's
    full-resolution output before a softmax over the frequency axis.
    """

    def __init__(self):
        super(MSnet, self).__init__()
        # --- encoder ---
        self.conv1 = nn.Sequential(
            nn.BatchNorm2d(3),
            nn.Conv2d(3, 32, 5, padding=2),
            nn.SELU()
        )
        # Pool only along frequency; keep indices so the decoder can unpool.
        self.pool1 = nn.MaxPool2d((4, 1), return_indices=True)
        self.conv2 = nn.Sequential(
            nn.BatchNorm2d(32),
            nn.Conv2d(32, 64, 5, padding=2),
            nn.SELU()
        )
        self.pool2 = nn.MaxPool2d((3, 1), return_indices=True)
        self.conv3 = nn.Sequential(
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 128, 5, padding=2),
            nn.SELU()
        )
        self.pool3 = nn.MaxPool2d((6, 1), return_indices=True)
        # Bottom branch: padding (0, 2) pads only the time axis, so the
        # 5-wide kernel shrinks the (already pooled) frequency axis by 4,
        # producing the single non-melody bin.
        self.bottom = nn.Sequential(
            nn.BatchNorm2d(128),
            nn.Conv2d(128, 1, 5, padding=(0, 2)),
            nn.SELU()
        )
        # --- decoder (mirrors the encoder's pooling factors) ---
        self.up_pool3 = nn.MaxUnpool2d((6, 1))
        self.up_conv3 = nn.Sequential(
            nn.BatchNorm2d(128),
            nn.Conv2d(128, 64, 5, padding=2),
            nn.SELU()
        )
        self.up_pool2 = nn.MaxUnpool2d((3, 1))
        self.up_conv2 = nn.Sequential(
            nn.BatchNorm2d(64),
            nn.Conv2d(64, 32, 5, padding=2),
            nn.SELU()
        )
        self.up_pool1 = nn.MaxUnpool2d((4, 1))
        self.up_conv1 = nn.Sequential(
            nn.BatchNorm2d(32),
            nn.Conv2d(32, 1, 5, padding=2),
            nn.SELU()
        )
        # Normalize over the frequency axis (non-melody bin + pitch bins).
        self.softmax = nn.Softmax(dim=2)

    def forward(self, x):
        """Run the network.

        Args:
            x: tensor of shape (batch, 3, freq, time); freq must be
               divisible by 72 (= 4 * 3 * 6) so the pooling stages and the
               bottom convolution line up.

        Returns:
            (output, output_pre): the softmax-normalized prediction and the
            pre-softmax activations, both of shape
            (batch, 1, freq + freq // 72 - 4, time) — for the usual 360-bin
            input that is 361 bins: one non-melody bin plus 360 pitch bins.
        """
        c1, ind1 = self.pool1(self.conv1(x))
        c2, ind2 = self.pool2(self.conv2(c1))
        c3, ind3 = self.pool3(self.conv3(c2))
        bm = self.bottom(c3)
        u3 = self.up_conv3(self.up_pool3(c3, ind3))
        u2 = self.up_conv2(self.up_pool2(u3, ind2))
        u1 = self.up_conv1(self.up_pool1(u2, ind1))
        # Concatenate once and reuse it for the softmax (the original
        # computed the identical cat twice).
        output_pre = torch.cat((bm, u1), dim=2)
        output = self.softmax(output_pre)
        return output, output_pre