import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim


class base_network(nn.Module):
    """LSTM encoder that maps a batch of token-ID sequences to fixed-size vectors."""

    def __init__(self, input_size, embedding_size, hidden_size, num_layers, dropout, bidirectional, device):
        super(base_network, self).__init__()
        # Token-ID lookup table: input_size is the vocabulary size.
        self.embedding = nn.Embedding(input_size, embedding_size)
        # Stacked (optionally bidirectional) LSTM over the embedded sequence.
        self.lstm = nn.LSTM(embedding_size, hidden_size, num_layers, batch_first=True,
                            dropout=dropout, bidirectional=bidirectional)
        self.device = device

    def forward(self, x):
        x = x.to(self.device)
        x = self.embedding(x)          # (batch, seq_len, embedding_size)
        x, (h_n, c_n) = self.lstm(x)   # h_n: (num_layers * num_directions, batch, hidden_size)
        # Take the last two entries of h_n (the top layer's forward and backward
        # states when bidirectional=True) and concatenate them per example.
        out = torch.permute(h_n[-2:, :, :], (1, 0, 2)).reshape(x.size(0), -1)
        return out
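
# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only): the vocabulary size, hyperparameters,
# and random input batch below are placeholder assumptions, not values taken
# from the original code.
if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    encoder = base_network(
        input_size=10_000,       # hypothetical vocabulary size
        embedding_size=128,
        hidden_size=256,
        num_layers=2,
        dropout=0.3,
        bidirectional=True,
        device=device,
    ).to(device)

    # A batch of 4 token-ID sequences of length 50 (random placeholder data).
    tokens = torch.randint(0, 10_000, (4, 50))

    with torch.no_grad():
        sentence_vectors = encoder(tokens)

    # With bidirectional=True the top layer's forward and backward hidden states
    # are concatenated, giving a (batch, 2 * hidden_size) tensor.
    print(sentence_vectors.shape)  # torch.Size([4, 512])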