-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathloss.py
29 lines (27 loc) · 1.01 KB
/
loss.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class reweight_loss(nn.Module):
    """Importance-reweighted cross-entropy loss for learning with noisy labels.

    Each sample's cross-entropy is scaled by
        beta_i = P(y_i | x_i) / (softmax(out_i) @ T)[y_i],
    the ratio of the model's posterior for the observed label to the
    noisy-label posterior induced by the transition matrix ``T``
    (T[i][j] is presumably P(noisy=j | clean=i) — confirm against caller).
    The weights are detached, so gradients flow only through the CE term,
    matching the original ``Variable(beta, requires_grad=True)`` leaf trick.
    """

    def __init__(self):
        super(reweight_loss, self).__init__()

    def forward(self, out, T, target):
        """Compute the mean beta-weighted cross-entropy.

        Args:
            out: (N, C) raw logits.
            T: (C, C) noise-transition matrix.
            target: (N,) integer class labels (the observed, noisy labels).

        Returns:
            A scalar tensor: mean over the batch of beta_i * CE_i.
        """
        out_softmax = F.softmax(out, dim=1)
        idx = torch.arange(len(target), device=out.device)
        # Posterior of the labeled class under the model.
        pro1 = out_softmax[idx, target]
        # Row-wise (T^T s^T)^T == s @ T, vectorizing the original per-sample matmul.
        pro2 = (out_softmax @ T)[idx, target]
        # Detach: the weights must not receive gradient (replaces deprecated
        # Variable(beta, requires_grad=True), which made beta a graph leaf).
        beta = (pro1 / pro2).detach()
        # Per-sample CE, then the same mean the original loop computed.
        ce = F.cross_entropy(out, target, reduction='none')
        return (beta * ce).mean()