multilabelloss.py

import paddle
import paddle.nn as nn
import paddle.nn.functional as F


class MultiLabelLoss(nn.Layer):
    """
    Multi-label classification loss: per-class sigmoid binary cross-entropy,
    with optional label smoothing controlled by `epsilon`.
    """

    def __init__(self, epsilon=None):
        super().__init__()
        # Label smoothing is only meaningful for epsilon in (0, 1);
        # any other value disables it.
        if epsilon is not None and (epsilon <= 0 or epsilon >= 1):
            epsilon = None
        self.epsilon = epsilon

    def _labelsmoothing(self, target, class_num):
        # Convert index labels to one-hot if the target is not already a
        # one-hot/multi-hot tensor of width `class_num`.
        if target.ndim == 1 or target.shape[-1] != class_num:
            one_hot_target = F.one_hot(target, class_num)
        else:
            one_hot_target = target
        # Smooth each entry: (1 - epsilon) * target + epsilon / class_num.
        soft_target = F.label_smooth(one_hot_target, epsilon=self.epsilon)
        soft_target = paddle.reshape(soft_target, shape=[-1, class_num])
        return soft_target

    def _binary_crossentropy(self, input, target, class_num):
        # Optionally smooth the targets, then apply sigmoid BCE over each class.
        if self.epsilon is not None:
            target = self._labelsmoothing(target, class_num)
        cost = F.binary_cross_entropy_with_logits(logit=input, label=target)
        return cost

    def forward(self, x, target):
        # Some model heads return a dict; use the raw logits.
        if isinstance(x, dict):
            x = x["logits"]
        class_num = x.shape[-1]
        loss = self._binary_crossentropy(x, target, class_num)
        loss = loss.mean()
        return {"MultiLabelLoss": loss}
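

# A minimal usage sketch (not part of the original file): the shapes, the
# epsilon value, and the random multi-hot labels below are illustrative
# assumptions, meant only to show the expected input format and the dict
# returned by forward().
if __name__ == "__main__":
    paddle.seed(0)
    batch_size, class_num = 4, 5
    logits = paddle.randn([batch_size, class_num])  # raw, unnormalized model outputs
    labels = paddle.randint(0, 2, [batch_size, class_num]).astype("float32")  # multi-hot targets
    loss_fn = MultiLabelLoss(epsilon=0.1)  # epsilon in (0, 1) enables label smoothing
    out = loss_fn(logits, labels)
    print(out["MultiLabelLoss"])  # scalar loss tensor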