layers.py
import torch
import torch.nn as nn
import torch.nn.functional as F


class blockCNN(nn.Module):
    """Conv2d -> optional BatchNorm -> optional ReLU -> optional MaxPool."""

    def __init__(self, in_nc, out_nc, kernel_size, padding, stride=1):
        super(blockCNN, self).__init__()
        self.in_nc = in_nc
        self.out_nc = out_nc
        self.kernel_size = kernel_size
        self.padding = padding
        # layers
        self.conv = nn.Conv2d(in_nc, out_nc,
                              kernel_size=kernel_size,
                              stride=stride,
                              padding=padding)
        self.bn = nn.BatchNorm2d(out_nc)

    def forward(self, batch, use_bn=False, use_relu=False,
                use_maxpool=False, maxpool_kernelsize=None):
        """
        in:
            batch - [batch_size, in_nc, H, W]
        out:
            batch - [batch_size, out_nc, H', W']
        """
        batch = self.conv(batch)
        if use_bn:
            batch = self.bn(batch)
        if use_relu:
            batch = F.relu(batch)
        if use_maxpool:
            assert maxpool_kernelsize is not None, \
                "maxpool_kernelsize is required when use_maxpool=True"
            batch = F.max_pool2d(batch, kernel_size=maxpool_kernelsize, stride=2)
        return batch
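
# Example usage of blockCNN (a minimal sketch; the shape values below are
# illustrative assumptions, not taken from this repo):
#
#   block = blockCNN(in_nc=1, out_nc=32, kernel_size=3, padding=1)
#   x = torch.randn(8, 1, 64, 256)                     # [batch, in_nc, H, W]
#   y = block(x, use_bn=True, use_relu=True,
#             use_maxpool=True, maxpool_kernelsize=2)  # -> [8, 32, 32, 128]
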
class blockRNN(nn.Module):
    """Single-layer GRU block, optionally bidirectional."""

    def __init__(self, in_size, hidden_size, out_size, bidirectional, dropout=0):
        super(blockRNN, self).__init__()
        self.in_size = in_size
        self.hidden_size = hidden_size
        self.out_size = out_size
        self.bidirectional = bidirectional
        # note: `dropout` is kept for interface compatibility but is not applied;
        # nn.GRU only applies dropout between stacked layers (num_layers > 1)
        self.dropout = dropout
        # layers
        self.gru = nn.GRU(in_size, hidden_size, bidirectional=bidirectional)

    def forward(self, batch, add_output=False):
        """
        in:
            batch - [seq_len, batch_size, in_size]
        out:
            outputs - [seq_len, batch_size, hidden_size] if add_output,
                      else [seq_len, batch_size, hidden_size * num_directions]
        """
        outputs, _ = self.gru(batch)
        if add_output:
            # sum the forward and backward directions of the bidirectional GRU
            # (assumes bidirectional=True, so the feature dim is 2 * hidden_size)
            out_size = outputs.size(2) // 2
            outputs = outputs[:, :, :out_size] + outputs[:, :, out_size:]
        return outputs
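
if __name__ == "__main__":
    # Smoke test for blockRNN (a minimal sketch; the sizes are illustrative
    # assumptions). With bidirectional=True the GRU output has feature size
    # 2 * hidden_size; add_output=True sums the two directions back down to
    # hidden_size.
    rnn = blockRNN(in_size=64, hidden_size=128, out_size=128, bidirectional=True)
    seq = torch.randn(50, 8, 64)   # [seq_len, batch_size, in_size]
    out = rnn(seq, add_output=True)
    print(out.shape)               # torch.Size([50, 8, 128])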