faisser.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
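"""FAISS vector-index utilities: build, append to, prune, persist, and query an
index of gallery features together with its id-to-label map."""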
import pickle
from pathlib import Path

import faiss
import numpy as np

from ....utils import logging
from ...utils.io import YAMLReader, YAMLWriter


class IndexData:
    """A FAISS index bundled with its id-to-label map and metadata (metric type and index type)."""

    VECTOR_FN = "vector"
    VECTOR_SUFFIX = ".index"
    IDMAP_FN = "id_map"
    IDMAP_SUFFIX = ".yaml"

    def __init__(self, index, index_info):
        self._index = index
        self._index_info = index_info
        self._id_map = index_info["id_map"]
        self._metric_type = index_info["metric_type"]
        self._index_type = index_info["index_type"]

    @property
    def index(self):
        return self._index

    @property
    def index_bytes(self):
        return faiss.serialize_index(self._index)

    @property
    def id_map(self):
        return self._id_map

    @property
    def metric_type(self):
        return self._metric_type

    @property
    def index_type(self):
        return self._index_type

    @property
    def index_info(self):
        return {
            "index_type": self.index_type,
            "metric_type": self.metric_type,
            "id_map": self._convert_int(self.id_map),
        }

    @classmethod
    def from_bytes(cls, bytes):
        tup = pickle.loads(bytes)
        index = faiss.deserialize_index(tup[0])
        return cls(index, tup[1])

    def to_bytes(self):
        tup = (faiss.serialize_index(self._index), self.index_info)
        return pickle.dumps(tup)

    def _convert_int(self, id_map):
        return {int(k): str(v) for k, v in id_map.items()}

    @staticmethod
    def _convert_int64(id_map):
        return {np.int64(k): str(v) for k, v in id_map.items()}

    def save(self, save_dir):
        save_dir = Path(save_dir)
        save_dir.mkdir(parents=True, exist_ok=True)
        vector_path = (save_dir / f"{self.VECTOR_FN}{self.VECTOR_SUFFIX}").as_posix()
        index_info_path = (save_dir / f"{self.IDMAP_FN}{self.IDMAP_SUFFIX}").as_posix()
        if self.metric_type in FaissBuilder.BINARY_METRIC_TYPE:
            faiss.write_index_binary(self.index, vector_path)
        else:
            faiss.write_index(self.index, vector_path)
        yaml_writer = YAMLWriter()
        yaml_writer.write(
            index_info_path,
            self.index_info,
            default_flow_style=False,
            allow_unicode=True,
        )

    @classmethod
    def load(cls, index):
        if isinstance(index, str):
            index_root = Path(index)
            vector_path = index_root / f"{cls.VECTOR_FN}{cls.VECTOR_SUFFIX}"
            index_info_path = index_root / f"{cls.IDMAP_FN}{cls.IDMAP_SUFFIX}"
            assert (
                vector_path.exists()
            ), f"The {cls.VECTOR_FN}{cls.VECTOR_SUFFIX} file was not found in {index}!"
            assert (
                index_info_path.exists()
            ), f"The {cls.IDMAP_FN}{cls.IDMAP_SUFFIX} file was not found in {index}!"
            yaml_reader = YAMLReader()
            index_info = yaml_reader.read(index_info_path)
            assert (
                "id_map" in index_info
                and "metric_type" in index_info
                and "index_type" in index_info
            ), f"The index_info file ({index_info_path}) may have been damaged: `id_map`, `metric_type`, or `index_type` was not found in `index_info`."
            id_map = IndexData._convert_int64(index_info["id_map"])
            if index_info["metric_type"] in FaissBuilder.BINARY_METRIC_TYPE:
                index = faiss.read_index_binary(vector_path.as_posix())
            else:
                index = faiss.read_index(vector_path.as_posix())
            assert index.ntotal == len(
                id_map
            ), "The number of vectors in the index does not match the number of entries in id_map."
            return index, id_map, index_info["metric_type"], index_info["index_type"]
        else:
            assert isinstance(index, IndexData)
            return index.index, index.id_map, index.metric_type, index.index_type


class FaissIndexer:
    """Searches a FAISS index and maps the returned ids back to gallery labels."""

    def __init__(
        self,
        index,
    ):
        super().__init__()
        self._indexer, self.id_map, self.metric_type, index_type = IndexData.load(index)

    def __call__(self, feature, score_thres, hamming_radius, topk):
        scores_list, ids_list = self._indexer.search(np.array(feature), topk)
        preds = []
        for scores, ids in zip(scores_list, ids_list):
            preds.append({"score": [], "label": []})
            for score, id in zip(scores, ids):
                if id >= 0:
                    preds[-1]["score"].append(score)
                    preds[-1]["label"].append(self.id_map[id])
        # queries whose best score falls outside the threshold get no prediction
        if self.metric_type in FaissBuilder.BINARY_METRIC_TYPE:
            idxs = np.where(scores_list[:, 0] > hamming_radius)[0]
        else:
            idxs = np.where(scores_list[:, 0] < score_thres)[0]
        for idx in idxs:
            preds[idx] = {"score": None, "label": None}
        return preds


class FaissBuilder:
    """Builds, appends to, and prunes FAISS indexes from gallery images and labels."""

    SUPPORT_METRIC_TYPE = ("hamming", "IP", "L2")
    SUPPORT_INDEX_TYPE = ("Flat", "IVF", "HNSW32")
    BINARY_METRIC_TYPE = ("hamming",)
    BINARY_SUPPORT_INDEX_TYPE = ("Flat", "IVF", "BinaryHash")

    @classmethod
    def _get_index_type(cls, metric_type, index_type, num=None):
        # if using the IVF method, calculate the number of inverted lists automatically
        if index_type == "IVF":
            index_type = index_type + str(min(int(num // 8), 65536))
            if metric_type in cls.BINARY_METRIC_TYPE:
                index_type += ",BFlat"
            else:
                index_type += ",Flat"
        # for a binary index, add "B" at the head of index_type
        if metric_type in cls.BINARY_METRIC_TYPE:
            assert (
                index_type in cls.BINARY_SUPPORT_INDEX_TYPE
            ), f"The metric type ({metric_type}) only supports the {cls.BINARY_SUPPORT_INDEX_TYPE} index types!"
            index_type = "B" + index_type
        if index_type == "HNSW32":
            logging.warning("The HNSW32 method does not support the 'remove' operation")
            index_type = "HNSW32"
        if index_type == "Flat":
            index_type = "Flat"
        return index_type

    @classmethod
    def _get_metric_type(cls, metric_type):
        if metric_type == "hamming":
            return faiss.METRIC_Hamming
        elif metric_type == "jaccard":
            return faiss.METRIC_Jaccard
        elif metric_type == "IP":
            return faiss.METRIC_INNER_PRODUCT
        elif metric_type == "L2":
            return faiss.METRIC_L2

    @classmethod
    def build(
        cls,
        gallery_imgs,
        gallery_label,
        predict_func,
        metric_type="IP",
        index_type="HNSW32",
    ):
        assert (
            index_type in cls.SUPPORT_INDEX_TYPE
        ), f"Only these index types are supported: {cls.SUPPORT_INDEX_TYPE}!"
        assert (
            metric_type in cls.SUPPORT_METRIC_TYPE
        ), f"Only these metric types are supported: {cls.SUPPORT_METRIC_TYPE}!"
        if isinstance(gallery_label, str):
            gallery_docs, gallery_list = cls.load_gallery(gallery_label, gallery_imgs)
        else:
            gallery_docs, gallery_list = gallery_label, gallery_imgs

        features = [res["feature"] for res in predict_func(gallery_list)]
        dtype = np.uint8 if metric_type in cls.BINARY_METRIC_TYPE else np.float32
        features = np.array(features).astype(dtype)
        vector_num, vector_dim = features.shape

        if metric_type in cls.BINARY_METRIC_TYPE:
            index = faiss.index_binary_factory(
                vector_dim,
                cls._get_index_type(metric_type, index_type, vector_num),
                cls._get_metric_type(metric_type),
            )
        else:
            index = faiss.index_factory(
                vector_dim,
                cls._get_index_type(metric_type, index_type, vector_num),
                cls._get_metric_type(metric_type),
            )
            index = faiss.IndexIDMap2(index)
        ids = {}

        # calculate ids for the new data
        index, ids = cls._add_gallery(
            metric_type, index, ids, features, gallery_docs, mode="new"
        )
        return IndexData(
            index, {"id_map": ids, "metric_type": metric_type, "index_type": index_type}
        )

    @classmethod
    def remove(
        cls,
        remove_ids,
        index,
    ):
        index, ids, metric_type, index_type = IndexData.load(index)
        if index_type == "HNSW32":
            raise RuntimeError(
                "The index_type HNSW32 does not support the 'remove' operation"
            )
        if isinstance(remove_ids, str):
            lines = []
            with open(remove_ids) as f:
                lines = f.readlines()
            remove_ids = []
            for line in lines:
                id_ = int(line.strip().split(" ")[0])
                remove_ids.append(id_)
            remove_ids = np.asarray(remove_ids)
        else:
            remove_ids = np.asarray(remove_ids)

        # remove the ids from id_map and the corresponding vectors from the faiss index
        index.remove_ids(remove_ids)
        ids = {k: v for k, v in ids.items() if k not in remove_ids}
        return IndexData(
            index, {"id_map": ids, "metric_type": metric_type, "index_type": index_type}
        )

    @classmethod
    def append(cls, gallery_imgs, gallery_label, predict_func, index):
        index, ids, metric_type, index_type = IndexData.load(index)
        assert (
            metric_type in cls.SUPPORT_METRIC_TYPE
        ), f"Only these metric types are supported: {cls.SUPPORT_METRIC_TYPE}!"
        if isinstance(gallery_label, str):
            gallery_docs, gallery_list = cls.load_gallery(gallery_label, gallery_imgs)
        else:
            gallery_docs, gallery_list = gallery_label, gallery_imgs

        features = [res["feature"] for res in predict_func(gallery_list)]
        dtype = np.uint8 if metric_type in cls.BINARY_METRIC_TYPE else np.float32
        features = np.array(features).astype(dtype)

        # calculate ids for the new data
        index, ids = cls._add_gallery(
            metric_type, index, ids, features, gallery_docs, mode="append"
        )
        return IndexData(
            index, {"id_map": ids, "metric_type": metric_type, "index_type": index_type}
        )

    @classmethod
    def _add_gallery(
        cls, metric_type, index, ids, gallery_features, gallery_docs, mode
    ):
        start_id = max(ids.keys()) + 1 if ids else 0
        ids_now = (np.arange(0, len(gallery_docs)) + start_id).astype(np.int64)

        # only train when building a new index
        if mode == "new":
            if metric_type in cls.BINARY_METRIC_TYPE:
                index.add(gallery_features)
            else:
                index.train(gallery_features)

        if metric_type not in cls.BINARY_METRIC_TYPE:
            index.add_with_ids(gallery_features, ids_now)
        # TODO(gaotingquan): how append when using hamming metric type
        # else:
        #     pass

        for i, d in zip(list(ids_now), gallery_docs):
            ids[i] = d
        return index, ids

    @classmethod
    def load_gallery(cls, gallery_label_path, gallery_imgs_root="", delimiter=" "):
        lines = []
        files = []
        labels = []
        root = Path(gallery_imgs_root)
        with open(gallery_label_path, "r", encoding="utf-8") as f:
            lines = f.readlines()
        for line in lines:
            path, label = line.strip().split(delimiter)
            file_path = root / path
            files.append(file_path.as_posix())
            labels.append(label)
        return labels, files
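

# A minimal usage sketch: it assumes only faiss and numpy are installed, and uses a
# dummy predict_func with random features as a hypothetical stand-in for a real
# feature extractor; the image paths, labels, and "demo_index" directory below are
# likewise hypothetical.
if __name__ == "__main__":

    def _dummy_predict_func(items):
        # stand-in for a real predictor: one 128-d feature vector per gallery item
        return [{"feature": np.random.rand(128).astype(np.float32)} for _ in items]

    gallery_imgs = ["img_0.jpg", "img_1.jpg", "img_2.jpg"]
    gallery_labels = ["cat", "dog", "bird"]

    # build a flat inner-product index and persist it alongside its id_map
    index_data = FaissBuilder.build(
        gallery_imgs,
        gallery_labels,
        _dummy_predict_func,
        metric_type="IP",
        index_type="Flat",
    )
    index_data.save("demo_index")  # writes vector.index and id_map.yaml

    # query the index with a batch of one feature vector (shape: [n_queries, dim])
    indexer = FaissIndexer("demo_index")
    query = np.random.rand(1, 128).astype(np.float32)
    preds = indexer(query, score_thres=0.0, hamming_radius=0, topk=2)
    print(preds)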