pp_shitu_v2.py

# copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pickle
from pathlib import Path

import numpy as np

from ..utils.io import ImageReader
from ..components import CropByBoxes, FaissIndexer
from ..components.retrieval.faiss import FaissBuilder
from ..results import ShiTuResult
from .base import BasePipeline


class ShiTuV2Pipeline(BasePipeline):
    """ShiTuV2 Pipeline"""

    entities = "PP-ShiTuV2"

    def __init__(
        self,
        det_model,
        rec_model,
        det_batch_size=1,
        rec_batch_size=1,
        index_dir=None,
        metric_type="IP",
        score_thres=None,
        hamming_radius=None,
        return_k=5,
        device=None,
        predictor_kwargs=None,
    ):
        super().__init__(device, predictor_kwargs)
        self._build_predictor(det_model, rec_model)
        self.set_predictor(det_batch_size, rec_batch_size, device)
        self._metric_type, self._return_k, self._score_thres, self._hamming_radius = (
            metric_type,
            return_k,
            score_thres,
            hamming_radius,
        )
        # Build a persistent indexer only when an index directory is given;
        # otherwise an index must be supplied per call to `predict()`.
        self._indexer = self._build_indexer(index_dir=index_dir) if index_dir else None

    def _build_indexer(self, index_bytes=None, id_map=None, index_dir=None):
        # Prefer an in-memory index (serialized bytes plus id map) when provided;
        # otherwise load `vector.index` and `id_map.pkl` from `index_dir`.
        if index_bytes is not None and id_map is not None:
            return FaissIndexer(
                index_bytes=index_bytes,
                id_map=id_map,
                metric_type=self._metric_type,
                return_k=self._return_k,
                score_thres=self._score_thres,
                hamming_radius=self._hamming_radius,
            )
        else:
            assert index_dir
            vector_path = (Path(index_dir) / "vector.index").as_posix()
            with open(Path(index_dir) / "id_map.pkl", "rb") as fd:
                id_map = pickle.load(fd)
            return FaissIndexer(
                vector_path=vector_path,
                id_map=id_map,
                metric_type=self._metric_type,
                return_k=self._return_k,
                score_thres=self._score_thres,
                hamming_radius=self._hamming_radius,
            )

    def _build_predictor(self, det_model, rec_model):
        self.det_model = self._create(model=det_model)
        self.rec_model = self._create(model=rec_model)
        self._crop_by_boxes = CropByBoxes()
        self._img_reader = ImageReader(backend="opencv")

    def set_predictor(self, det_batch_size=None, rec_batch_size=None, device=None):
        if det_batch_size:
            self.det_model.set_predictor(batch_size=det_batch_size)
        if rec_batch_size:
            self.rec_model.set_predictor(batch_size=rec_batch_size)
        if device:
            self.det_model.set_predictor(device=device)
            self.rec_model.set_predictor(device=device)

    def predict(self, input, index_bytes=None, id_map=None, index_dir=None, **kwargs):
        # A per-call index (bytes or directory) overrides the one built in __init__.
        if index_bytes is not None or index_dir is not None:
            indexer = self._build_indexer(
                index_bytes=index_bytes, id_map=id_map, index_dir=index_dir
            )
        else:
            indexer = self._indexer
        assert indexer
        self.set_predictor(**kwargs)
        # Lazily yield one result per input image.
        for det_res in self.det_model(input):
            rec_res = self.get_rec_result(det_res, indexer)
            yield self.get_final_result(det_res, rec_res)

    def get_rec_result(self, det_res, indexer):
        # If detection found nothing, fall back to recognizing the whole image.
        if len(det_res["boxes"]) == 0:
            full_img = self._img_reader.read(det_res["input_path"])
            h, w = full_img.shape[:2]
            det_res["boxes"].append(
                {
                    "cls_id": 0,
                    "label": "full_img",
                    "score": 0,
                    "coordinate": [0, 0, w, h],
                }
            )
        subs_of_img = list(self._crop_by_boxes(det_res))
        img_list = [img["img"] for img in subs_of_img]
        all_rec_res = list(self.rec_model(img_list))
        all_rec_res = next(indexer(all_rec_res))
        output = {"label": [], "score": []}
        for res in all_rec_res:
            output["label"].append(res["label"])
            output["score"].append(res["score"])
        return output

    def get_final_result(self, det_res, rec_res):
        single_img_res = {"input_path": det_res["input_path"], "boxes": []}
        for i, obj in enumerate(det_res["boxes"]):
            rec_scores = rec_res["score"][i]
            labels = rec_res["label"][i]
            single_img_res["boxes"].append(
                {
                    "labels": labels,
                    "rec_scores": rec_scores,
                    "det_score": obj["score"],
                    "coordinate": obj["coordinate"],
                }
            )
        return ShiTuResult(single_img_res)
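
    # For reference, the per-image dict wrapped by ShiTuResult above has this
    # shape (the example values are illustrative, not produced by this module):
    # {
    #     "input_path": "path/to/img.jpg",
    #     "boxes": [
    #         {
    #             "labels": [...],        # gallery labels returned by the indexer (up to return_k)
    #             "rec_scores": [...],    # matching retrieval scores
    #             "det_score": 0.98,      # detection confidence for this box
    #             "coordinate": [x1, y1, x2, y2],
    #         },
    #         ...
    #     ],
    # }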

    def _build_index(
        self,
        data_root,
        index_dir,
        mode="new",
        metric_type="IP",
        index_type="HNSW32",
        **kwargs,
    ):
        self.set_predictor(**kwargs)
        self._metric_type = metric_type if metric_type else self._metric_type
        builder = FaissBuilder(
            self.rec_model.predict,
            mode=mode,
            metric_type=self._metric_type,
            index_type=index_type,
        )
        # `mode` selects whether the gallery entries create a new index, are
        # removed from an existing one, or are appended to it.
        if mode == "new":
            index_bytes, id_map = builder.build(
                Path(data_root) / "gallery.txt", data_root, index_dir
            )
        elif mode == "remove":
            index_bytes, id_map = builder.remove(
                Path(data_root) / "gallery.txt", data_root, index_dir
            )
        elif mode == "append":
            index_bytes, id_map = builder.append(
                Path(data_root) / "gallery.txt", data_root, index_dir
            )
        else:
            raise Exception("`mode` only supports `new`, `remove` and `append`.")
        return index_bytes, id_map
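
    # Note: `gallery.txt` is expected directly under `data_root`. Each line is
    # assumed to pair a gallery image path (relative to `data_root`) with its
    # label, e.g. `gallery/0001.jpg<sep>drink_a`; the exact separator and
    # parsing rules are defined by FaissBuilder, so treat this layout as an
    # assumption rather than a guarantee of this module.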

    def build_index(
        self, data_root, index_dir, metric_type="IP", index_type="HNSW32", **kwargs
    ):
        return self._build_index(
            data_root=data_root,
            index_dir=index_dir,
            mode="new",
            metric_type=metric_type,
            index_type=index_type,
            **kwargs,
        )

    def remove_index(
        self, data_root, index_dir, metric_type="IP", index_type="HNSW32", **kwargs
    ):
        return self._build_index(
            data_root=data_root,
            index_dir=index_dir,
            mode="remove",
            metric_type=metric_type,
            index_type=index_type,
            **kwargs,
        )

    def append_index(
        self, data_root, index_dir, metric_type="IP", index_type="HNSW32", **kwargs
    ):
        return self._build_index(
            data_root=data_root,
            index_dir=index_dir,
            mode="append",
            metric_type=metric_type,
            index_type=index_type,
            **kwargs,
        )
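

# Example usage (a minimal sketch; the model names, paths, and file names below
# are illustrative assumptions, not values defined in this module):
#
#     pipeline = ShiTuV2Pipeline(
#         det_model="PP-ShiTuV2_det",   # assumed mainbody-detection model name
#         rec_model="PP-ShiTuV2_rec",   # assumed feature-extraction model name
#         device="gpu:0",
#     )
#     # Build a gallery index once from `<data_root>/gallery.txt` ...
#     index_bytes, id_map = pipeline.build_index("drink_dataset", "drink_index")
#     # ... then retrieve labels for new images; `predict` is a generator that
#     # yields one ShiTuResult per input image.
#     for res in pipeline.predict(["query.jpg"], index_dir="drink_index"):
#         print(res)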