deephashloss.py

# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn


class DSHSDLoss(nn.Layer):
    """
    # DSHSD (IEEE ACCESS 2019)
    # paper [Deep Supervised Hashing Based on Stable Distribution](https://ieeexplore.ieee.org/document/8648432/)
    # [DSHSD] epoch:70,  bit:48, dataset:cifar10-1,  MAP:0.809, Best MAP: 0.809
    # [DSHSD] epoch:250, bit:48, dataset:nuswide_21, MAP:0.809, Best MAP: 0.815
    # [DSHSD] epoch:135, bit:48, dataset:imagenet,   MAP:0.647, Best MAP: 0.647
    """

    def __init__(self, alpha, multi_label=False):
        super(DSHSDLoss, self).__init__()
        self.alpha = alpha
        self.multi_label = multi_label

    def forward(self, input, label):
        feature = input["features"]
        logits = input["logits"]

        # pairwise squared Euclidean distances between all features
        dist = paddle.sum(
            paddle.square(
                paddle.unsqueeze(feature, 1) - paddle.unsqueeze(feature, 0)),
            axis=2)

        # label to one-hot
        label = paddle.flatten(label)
        n_class = logits.shape[1]
        label = paddle.nn.functional.one_hot(label, n_class).astype("float32")

        # s[i][j] = 1 when samples i and j share no label (dissimilar pair)
        s = (paddle.matmul(
            label, label, transpose_y=True) == 0).astype("float32")
        margin = 2 * feature.shape[1]

        # pairwise loss: pull similar pairs together, push dissimilar
        # pairs apart until their distance reaches the margin
        Ld = (1 - s) / 2 * dist + s / 2 * (margin - dist).clip(min=0)
        Ld = Ld.mean()

        if self.multi_label:
            # multi-label classification loss (sigmoid cross entropy on logits)
            Lc = (logits - label * logits +
                  ((1 + (-logits).exp()).log())).sum(axis=1).mean()
        else:
            # single-label classification loss (softmax cross entropy)
            Lc = (-paddle.nn.functional.softmax(logits).log() * label).sum(
                axis=1).mean()

        return {"dshsdloss": Lc + Ld * self.alpha}
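

# A minimal usage sketch (not part of the original file): it shows the dict
# contract DSHSDLoss.forward() expects. The batch size, feature width, class
# count, and alpha below are illustrative assumptions, not values from the
# original code.
def _demo_dshsd_loss():
    loss_fn = DSHSDLoss(alpha=0.05)
    batch = {
        "features": paddle.randn([8, 48]),  # 8 samples, 48-bit hash features
        "logits": paddle.randn([8, 10]),  # logits of a 10-class head
    }
    label = paddle.randint(0, 10, [8, 1])  # integer class ids
    return loss_fn(batch, label)["dshsdloss"]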


class LCDSHLoss(nn.Layer):
    """
    # paper [Locality-Constrained Deep Supervised Hashing for Image Retrieval](https://www.ijcai.org/Proceedings/2017/0499.pdf)
    # [LCDSH] epoch:145, bit:48, dataset:cifar10-1,  MAP:0.798, Best MAP: 0.798
    # [LCDSH] epoch:183, bit:48, dataset:nuswide_21, MAP:0.833, Best MAP: 0.834
    """

    def __init__(self, n_class, _lambda):
        super(LCDSHLoss, self).__init__()
        self._lambda = _lambda
        self.n_class = n_class

    def forward(self, input, label):
        feature = input["features"]

        # label to one-hot
        label = paddle.flatten(label)
        label = paddle.nn.functional.one_hot(
            label, self.n_class).astype("float32")

        # s[i][j] = +1 for pairs that share a label, -1 otherwise
        s = 2 * (paddle.matmul(
            label, label, transpose_y=True) > 0).astype("float32") - 1

        # likelihood loss on the scaled, clipped feature inner products
        inner_product = paddle.matmul(feature, feature, transpose_y=True) * 0.5
        inner_product = inner_product.clip(min=-50, max=50)
        L1 = paddle.log(1 + paddle.exp(-s * inner_product)).mean()

        # quantization loss: keep continuous features close to binary codes
        b = feature.sign()
        inner_product_ = paddle.matmul(b, b, transpose_y=True) * 0.5
        sigmoid = paddle.nn.Sigmoid()
        L2 = (sigmoid(inner_product) - sigmoid(inner_product_)).pow(2).mean()

        return {"lcdshloss": L1 + self._lambda * L2}
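

# A minimal usage sketch (not part of the original file), analogous to the
# DSHSD demo above; n_class, _lambda, and the tensor shapes are illustrative
# assumptions.
def _demo_lcdsh_loss():
    loss_fn = LCDSHLoss(n_class=10, _lambda=0.1)
    batch = {"features": paddle.randn([8, 48])}  # LCDSH only uses features
    label = paddle.randint(0, 10, [8, 1])  # integer class ids
    return loss_fn(batch, label)["lcdshloss"]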