# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

from ppdet.core.workspace import register

__all__ = ['SmoothL1Loss']
  22. @register
  23. class SmoothL1Loss(nn.Layer):
  24. """Smooth L1 Loss.
  25. Args:
  26. beta (float): controls smooth region, it becomes L1 Loss when beta=0.0
  27. loss_weight (float): the final loss will be multiplied by this
  28. """
  29. def __init__(self,
  30. beta=1.0,
  31. loss_weight=1.0):
  32. super(SmoothL1Loss, self).__init__()
  33. assert beta >= 0
  34. self.beta = beta
  35. self.loss_weight = loss_weight
  36. def forward(self, pred, target, reduction='none'):
  37. """forward function, based on fvcore.
  38. Args:
  39. pred (Tensor): prediction tensor
  40. target (Tensor): target tensor, pred.shape must be the same as target.shape
  41. reduction (str): the way to reduce loss, one of (none, sum, mean)
  42. """
  43. assert reduction in ('none', 'sum', 'mean')
  44. target = target.detach()
  45. if self.beta < 1e-5:
  46. loss = paddle.abs(pred - target)
  47. else:
  48. n = paddle.abs(pred - target)
  49. cond = n < self.beta
  50. loss = paddle.where(cond, 0.5 * n ** 2 / self.beta, n - 0.5 * self.beta)
  51. if reduction == 'mean':
  52. loss = loss.mean() if loss.size > 0 else 0.0 * loss.sum()
  53. elif reduction == 'sum':
  54. loss = loss.sum()
  55. return loss * self.loss_weight