# customtrainer.py
from transformers import Trainer
import torch
import torch.nn as nn
import torch.nn.functional as F


class MultilabelTrainer(Trainer):
    """Trainer whose loss is a weighted mix of cross-entropy and focal loss."""

    def compute_loss(self, model, inputs, return_outputs=False):
        labels = inputs.pop("labels")
        outputs = model(**inputs)
        logits = outputs.logits
        loss_fct = Cross_FocalLoss()
        loss = loss_fct(logits, labels)
        return (loss, outputs) if return_outputs else loss


class Cross_FocalLoss(nn.Module):
    """0.75 * cross-entropy + 0.25 * focal loss over the same logits."""

    def __init__(self, weight=None, gamma=2.0, reduction='mean', **kwargs):
        super().__init__()
        self.weight = weight
        self.gamma = gamma
        self.reduction = reduction

    def forward(self, inputs, targets):
        loss_cross = nn.CrossEntropyLoss(weight=self.weight)(inputs, targets)
        loss_focal = FocalLoss(weight=self.weight, gamma=self.gamma,
                               reduction=self.reduction)(inputs, targets)
        return 0.75 * loss_cross + 0.25 * loss_focal


class FocalLoss(nn.modules.loss._WeightedLoss):
    """Focal loss (Lin et al., 2017); `weight` acts as the alpha term that
    balances class frequencies."""

    def __init__(self, weight=None, gamma=2.0, reduction='mean'):
        super().__init__(weight, reduction=reduction)
        self.gamma = gamma
        self.weight = weight

    def forward(self, input, target):
        # Compute per-sample cross-entropy (reduction='none') so that the
        # modulating factor (1 - pt) ** gamma can down-weight easy examples
        # individually before the batch is reduced.
        ce_loss = F.cross_entropy(input, target, reduction='none',
                                  weight=self.weight)
        pt = torch.exp(-ce_loss)  # pt = predicted probability of the true class
        focal_loss = (1 - pt) ** self.gamma * ce_loss
        if self.reduction == 'mean':
            return focal_loss.mean()
        if self.reduction == 'sum':
            return focal_loss.sum()
        return focal_loss
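

# ---------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the original file):
# a minimal end-to-end run of MultilabelTrainer. The checkpoint name and the
# three-example toy dataset below are assumptions chosen for illustration.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from transformers import (AutoModelForSequenceClassification,
                              AutoTokenizer, TrainingArguments)

    model_name = "bert-base-uncased"  # hypothetical checkpoint
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSequenceClassification.from_pretrained(
        model_name, num_labels=3)

    # A plain list of feature dicts is accepted as a Trainer dataset;
    # the default collator stacks each field into a batch tensor.
    texts = ["great movie", "terrible movie", "it was fine"]
    labels = [0, 1, 2]
    enc = tokenizer(texts, truncation=True, padding=True)
    train_ds = [{"input_ids": enc["input_ids"][i],
                 "attention_mask": enc["attention_mask"][i],
                 "labels": labels[i]} for i in range(len(texts))]

    args = TrainingArguments(output_dir="out", num_train_epochs=1,
                             per_device_train_batch_size=2,
                             report_to="none")
    trainer = MultilabelTrainer(model=model, args=args,
                                train_dataset=train_ds)
    trainer.train()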