sfl.py
import torch.nn as nn
import torch.nn.functional as F
from mmdet.models.losses.utils import weighted_loss


@weighted_loss
def quality_focal_loss_with_prob(pred, target, beta=2.0):
    """Quality Focal Loss variant for already-activated (sigmoid) ``pred``.

    ``target`` is a tuple ``(label, score, weight)``: target class indices,
    quality scores for positives, and per-sample soft weights.
    """
    label, score, weight = target
    # Negatives are supervised by a quality score of 0; the modulating
    # factor pred ** beta down-weights easy negatives.
    pred_sigmoid = pred  # predictions are already probabilities
    scale_factor = pred_sigmoid
    zerolabel = scale_factor.new_zeros(pred.shape)
    loss = F.binary_cross_entropy(
        pred, zerolabel, reduction='none') * scale_factor.pow(beta) * 0.75
    # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes
    bg_class_ind = pred.size(1)
    pos = ((label >= 0) & (label < bg_class_ind)).nonzero().squeeze(1)
    pos_label = label[pos].long()
    # Positives are supervised by their quality scores, scaled by the
    # per-sample soft weight.
    loss[pos, pos_label] = F.binary_cross_entropy(
        pred[pos, pos_label], score[pos],
        reduction='none') * weight[pos]
    loss = loss.sum(-1)
    return loss
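
# Note (not in the original file): mmdet's ``weighted_loss`` decorator wraps
# the function above so that, once decorated, it is called as
#   quality_focal_loss_with_prob(pred, target, weight=None,
#                                reduction='mean', avg_factor=None, beta=2.0)
# where ``weight`` and ``avg_factor`` handle element-wise weighting and loss
# averaging. This matches how ``SoftFocalLoss.forward`` below invokes it.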

class SoftFocalLoss(nn.Module):
    """Soft Focal Loss wrapping ``quality_focal_loss_with_prob``.

    Expects sigmoid-activated predictions, hence ``activated=True``.
    """

    def __init__(self,
                 use_sigmoid=True,
                 beta=2.0,
                 reduction='mean',
                 loss_weight=1.0,
                 activated=True):
        super(SoftFocalLoss, self).__init__()
        assert use_sigmoid is True, 'Only sigmoid in SFL is supported now.'
        self.use_sigmoid = use_sigmoid
        self.beta = beta
        self.reduction = reduction
        self.loss_weight = loss_weight
        self.activated = activated

    def forward(self,
                pred,
                target,
                weight=None,
                avg_factor=None,
                reduction_override=None):
        assert reduction_override in (None, 'none', 'mean', 'sum')
        reduction = (
            reduction_override if reduction_override else self.reduction)
        if self.use_sigmoid:
            if self.activated:
                calculate_loss_func = quality_focal_loss_with_prob
            else:
                # Raw logits are not supported; predictions must already
                # be sigmoid-activated probabilities.
                raise NotImplementedError
            loss_cls = self.loss_weight * calculate_loss_func(
                pred,
                target,
                weight,
                reduction=reduction,
                avg_factor=avg_factor,
                beta=self.beta,
            )
        else:
            raise NotImplementedError
        return loss_cls
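

# Minimal usage sketch (illustrative only; shapes and values below are
# assumptions, not part of the original module):
if __name__ == '__main__':
    import torch

    num_anchors, num_classes = 8, 80
    pred = torch.rand(num_anchors, num_classes)  # sigmoid probabilities
    # Labels in [0, num_classes]; num_classes denotes background.
    label = torch.randint(0, num_classes + 1, (num_anchors,))
    score = torch.rand(num_anchors)   # quality targets in [0, 1]
    weight = torch.ones(num_anchors)  # per-sample soft weights

    loss_fn = SoftFocalLoss(beta=2.0, loss_weight=1.0)
    loss = loss_fn(pred, (label, score, weight))
    print(loss)  # scalar: mean-reduced soft focal loss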