mixed_loss.py

# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import paddle
from paddle import nn
import paddle.nn.functional as F

from paddlex.paddleseg.cvlibs import manager


@manager.LOSSES.add_component
class MixedLoss(nn.Layer):
    """
    Weighted combination of multiple losses.

    The advantage is that mixed-loss training can be achieved without
    changing the network code.

    Args:
        losses (list[nn.Layer]): A list of loss layers.
        coef (list[float|int]): Weighting coefficients, one per entry in
            ``losses``.

    Returns:
        A callable MixedLoss object.
    """

    def __init__(self, losses, coef):
        super(MixedLoss, self).__init__()
        if not isinstance(losses, list):
            raise TypeError('`losses` must be a list!')
        if not isinstance(coef, list):
            raise TypeError('`coef` must be a list!')
        len_losses = len(losses)
        len_coef = len(coef)
        if len_losses != len_coef:
            raise ValueError(
                'The length of `losses` should equal that of `coef`, '
                'but they are {} and {}.'.format(len_losses, len_coef))
        self.losses = losses
        self.coef = coef

    def forward(self, logits, labels):
        # Accumulate the weighted sum of each individual loss.
        final_output = 0
        for i, loss in enumerate(self.losses):
            output = loss(logits, labels)
            final_output += output * self.coef[i]
        return final_output
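

# --- Usage sketch (not part of the original file) ---------------------------
# A minimal, self-contained example of combining two losses with MixedLoss.
# `_ToySquaredError` below is a hypothetical stand-in for real segmentation
# losses (e.g. cross-entropy or Dice from paddleseg); it exists only so the
# snippet runs without extra dependencies, and the tensor shapes are
# illustrative assumptions.
if __name__ == '__main__':

    class _ToySquaredError(nn.Layer):
        def forward(self, logits, labels):
            # Mean squared difference between the score maps and the targets.
            return paddle.mean((logits - labels.astype('float32')) ** 2)

    # Total loss = 0.8 * first loss + 0.2 * second loss.
    mixed = MixedLoss(
        losses=[_ToySquaredError(), _ToySquaredError()], coef=[0.8, 0.2])

    logits = paddle.randn([2, 3, 64, 64])          # N, C, H, W scores
    labels = paddle.randint(0, 3, [2, 3, 64, 64])  # same shape, for the toy loss
    print(float(mixed(logits, labels)))            # weighted sum of both losses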