import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

from sklearn.metrics import precision_score, recall_score
class SingleLabelTaskHead(nn.Module):
    """Three-layer MLP head for single-label (multi-class) classification."""

    def __init__(self, input_size, output_size, device):
        super().__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.fc2 = nn.Linear(50, 50)
        self.fc3 = nn.Linear(50, output_size)
        self.softmax = nn.Softmax(dim=1)
        self.device = device

    def forward(self, x):
        # Two hidden ReLU layers, then a softmax over the class dimension.
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        # Note: this returns class probabilities, not logits. If the head is
        # trained with nn.CrossEntropyLoss (which applies log-softmax itself),
        # return the raw fc3 output here and apply softmax only at
        # prediction time.
        x = self.softmax(x)
        return x

    @torch.no_grad()
    def predict(self, x):
        # Hard prediction: index of the highest-probability class per sample.
        probs = self.forward(x)
        return torch.argmax(probs, dim=1)

    def accuracy(self, prediction, target):
        # `prediction` is the (batch, num_classes) probability tensor
        # returned by forward(); `target` holds integer class indices.
        prediction = torch.argmax(prediction, dim=1)
        return torch.mean((prediction == target).float())

    def recall(self, prediction, target):
        # Caution: for single-label targets, micro-averaged recall is
        # identical to accuracy; use average='macro' for a per-class view.
        prediction = torch.argmax(prediction, dim=1)
        return recall_score(target.cpu().numpy(), prediction.cpu().numpy(), average='micro')

    def precision(self, prediction, target):
        # Same caveat as recall(): micro-averaged precision equals accuracy.
        prediction = torch.argmax(prediction, dim=1)
        return precision_score(target.cpu().numpy(), prediction.cpu().numpy(), average='micro')
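
# Usage sketch (an assumption, not part of the original module): the feature
# size, class count, batch size, and learning rate below are placeholders
# chosen only to show how the head is trained and evaluated. Because forward()
# returns probabilities, the sketch pairs it with NLLLoss on the
# log-probabilities; with raw logits, nn.CrossEntropyLoss would be the more
# numerically stable choice.
if __name__ == "__main__":
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    head = SingleLabelTaskHead(input_size=128, output_size=4, device=device).to(device)
    optimizer = optim.Adam(head.parameters(), lr=1e-3)

    # Dummy batch: 32 feature vectors with integer class targets.
    features = torch.randn(32, 128, device=device)
    targets = torch.randint(0, 4, (32,), device=device)

    # One training step on the probability outputs.
    probs = head(features)
    loss = F.nll_loss(torch.log(probs + 1e-8), targets)  # log of probabilities
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    # Evaluation with the head's own metrics.
    with torch.no_grad():
        probs = head(features)
    print("accuracy :", head.accuracy(probs, targets).item())
    print("recall   :", head.recall(probs, targets))
    print("precision:", head.precision(probs, targets))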