forked from smalik169/recursive-convolutional-autoencoder
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmodel.py
More file actions
92 lines (61 loc) · 2.84 KB
/
model.py
File metadata and controls
92 lines (61 loc) · 2.84 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import torch
import torch.nn as nn
from torch.autograd import Variable
class Encoder(nn.Module):
    """Recursively convolve and pool an embedded token sequence down to a
    fixed-size code vector of length ``dim``.

    Args:
        dim: size of the flat output vector; must equal ``emb_dim`` times the
            sequence length remaining after pooling (<= 4).
        emb_dim: embedding and convolution channel dimensionality.
        num_emb: vocabulary size for the embedding table.
        activation: non-linearity applied after every layer.
        group_size: number of layers in each conv/linear group.
        kernel_size: convolution kernel width.
        stride: convolution stride.
    """

    def __init__(self, dim, emb_dim, num_emb, activation=nn.ReLU(),
                 group_size=2, kernel_size=3, stride=1):
        super(Encoder, self).__init__()
        # BUG FIX: the module is nn.Embedding; nn.Embeddings does not exist.
        self.embeddings = nn.Embedding(num_emb, emb_dim)
        self.activation = activation
        self.dim = dim
        # "Same"-style padding. nn.Conv1d only accepts symmetric (int)
        # padding, so for even kernel widths pad asymmetrically with
        # ConstantPad1d before an unpadded conv (the original passed a
        # 2-tuple to Conv1d, which fails at runtime).
        left = kernel_size // 2
        right = kernel_size // 2 - 1 + kernel_size % 2

        def _conv():
            # One length-preserving convolution layer.
            if left == right:
                return nn.Conv1d(emb_dim, emb_dim, kernel_size, stride, left)
            return nn.Sequential(
                nn.ConstantPad1d((left, right), 0.0),
                nn.Conv1d(emb_dim, emb_dim, kernel_size, stride))

        self.prefix_group = nn.ModuleList([_conv() for _ in range(group_size)])
        self.recursion_group = nn.ModuleList(
            [_conv() for _ in range(group_size)])
        self.max_pool = nn.MaxPool1d(kernel_size=2)
        self.postfix_group = nn.ModuleList(
            [nn.Linear(dim, dim) for _ in range(group_size)])

    def forward(self, data):
        """Encode a (batch, seq_len) LongTensor of token ids into a
        (batch, dim) FloatTensor."""
        # (batch, seq) -> (batch, seq, emb) -> (batch, emb, seq); Conv1d
        # wants channels before the sequence axis.
        hid = self.embeddings(data).transpose(1, 2)
        for op in self.prefix_group:
            hid = self.activation(op(hid))
        # Halve the sequence length until at most 4 positions remain.
        while hid.size(-1) > 4:
            for op in self.recursion_group:
                hid = self.activation(op(hid))
            hid = self.max_pool(hid)
        # BUG FIX: flatten before the fully connected layers; the original
        # applied Linear(dim, dim) to the un-flattened (batch, emb, len)
        # tensor, whose last dimension cannot match dim.
        hid = hid.contiguous().view(hid.size(0), self.dim)
        for op in self.postfix_group:
            hid = self.activation(op(hid))
        return hid
class Decoder(nn.Module):
    """Expand a fixed-size code back into per-position vocabulary logits by
    doubling the sequence length ``rec_steps`` times.

    Args:
        input_dim: size of the flat input code; must be divisible by
            ``feature_size``.
        num_emb: vocabulary size (logit dimension of the output).
        feature_size: channel count used by the convolutions.
        activation: non-linearity applied after every layer.
        group_size: number of layers in each group.
        kernel_size: convolution kernel width.
        stride: convolution stride.
    """

    def __init__(self, input_dim, num_emb, feature_size,
                 activation=nn.ReLU(), group_size=2, kernel_size=3, stride=1):
        super(Decoder, self).__init__()
        self.activation = activation
        self.dim = input_dim
        # BUG FIX: forward() needs feature_size; the original referenced it
        # there as a bare (undefined) name.
        self.feature_size = feature_size
        # "Same"-style padding; nn.Conv1d rejects the 2-tuple the original
        # passed, so even kernels get explicit asymmetric ConstantPad1d.
        left = kernel_size // 2
        right = kernel_size // 2 - 1 + kernel_size % 2

        def _conv(in_ch, out_ch):
            # One length-preserving convolution layer.
            if left == right:
                return nn.Conv1d(in_ch, out_ch, kernel_size, stride, left)
            return nn.Sequential(
                nn.ConstantPad1d((left, right), 0.0),
                nn.Conv1d(in_ch, out_ch, kernel_size, stride))

        # BUG FIX: original used the undefined name `dim` here (NameError);
        # the prefix linears operate on the flat input code.
        self.prefix_group = nn.ModuleList(
            [nn.Linear(input_dim, input_dim) for _ in range(group_size)])
        self.expand_conv = _conv(feature_size, 2 * feature_size)
        self.recursion_group = nn.ModuleList(
            [_conv(feature_size, feature_size)
             for _ in range(group_size - 1)])
        self.postfix_group = nn.ModuleList(
            [_conv(feature_size, feature_size) for _ in range(group_size)])
        self.projection = nn.Linear(feature_size, num_emb)

    def forward(self, data, rec_steps):
        """Decode a (batch, input_dim) code into logits of shape
        (batch, (input_dim // feature_size) * 2**rec_steps, num_emb)."""
        # BUG FIX: the original read `hid` before assigning it. Run the
        # fully connected prefix on the flat code, then fold it into
        # (batch, feature_size, length) for the convolutions.
        hid = data
        for op in self.prefix_group:
            hid = self.activation(op(hid))
        hid = hid.view(hid.size(0), self.feature_size, -1)
        for _ in range(rec_steps):
            # Channel-doubling conv followed by a reshape acts as a 2x
            # sequence-length upsampling step.
            hid = self.activation(self.expand_conv(hid))
            hid = hid.view(hid.size(0), hid.size(1) // 2, -1)
            for op in self.recursion_group:
                hid = self.activation(op(hid))
        for op in self.postfix_group:
            hid = self.activation(op(hid))
        # Per-position projection to vocabulary logits:
        # (batch, channels, len) -> (batch, len, num_emb).
        return self.projection(hid.transpose(1, 2))