#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
################################################################################
#
# Copyright (c) 2024 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
Author: PaddlePaddle Authors
"""
import os
from copy import deepcopy
from abc import ABC, abstractmethod

from .kernel_option import PaddleInferenceOption
from .utils.paddle_inference_predictor import _PaddleInferencePredictor
from .utils.mixin import FromDictMixin
from .utils.batch import batchable_method, Batcher
from .utils.node import Node
from .utils.official_models import official_models
from ....utils.device import get_device
from ....utils import logging
from ....utils.config import AttrDict


class BasePredictor(ABC, FromDictMixin, Node):
    """ Base Predictor """
    __is_base = True

    MODEL_FILE_TAG = 'inference'

    def __init__(self,
                 model_dir,
                 kernel_option,
                 output,
                 pre_transforms=None,
                 post_transforms=None):
        super().__init__()
        self.model_dir = model_dir
        self.kernel_option = kernel_option
        self.output = output
        # Hook for subclasses that need extra resources besides the model
        # files; the base implementation returns None.
        self.other_src = self.load_other_src()
        logging.debug(
            f"-------------------- {self.__class__.__name__} --------------------\n"
            f"Model: {self.model_dir}\n"
            f"Env: {self.kernel_option}")
        self.pre_tfs, self.post_tfs = self.build_transforms(pre_transforms,
                                                            post_transforms)
        # A Paddle inference model is a pair of files sharing one base name:
        # the graph (.pdmodel) and the weights (.pdiparams).
        param_path = os.path.join(model_dir,
                                  f"{self.MODEL_FILE_TAG}.pdiparams")
        model_path = os.path.join(model_dir, f"{self.MODEL_FILE_TAG}.pdmodel")
        self._predictor = _PaddleInferencePredictor(
            param_path=param_path, model_path=model_path, option=kernel_option)

    def build_transforms(self, pre_transforms, post_transforms):
        """ build pre-transforms and post-transforms """
        pre_tfs = (pre_transforms if pre_transforms is not None else
                   self._get_pre_transforms_from_config())
        logging.debug(f"Preprocess Ops: {self._format_transforms(pre_tfs)}")
        post_tfs = (post_transforms if post_transforms is not None else
                    self._get_post_transforms_from_config())
        logging.debug(f"Postprocessing: {self._format_transforms(post_tfs)}")
        return pre_tfs, post_tfs

    def predict(self, input, batch_size=1):
        """ predict """
        if not isinstance(input, dict) and not (isinstance(input, list) and all(
                isinstance(ele, dict) for ele in input)):
            raise TypeError("`input` should be a dict or a list of dicts.")
        orig_input = input
        if isinstance(input, dict):
            input = [input]
        output = []
        for mini_batch in Batcher(input, batch_size=batch_size):
            mini_batch = self._preprocess(
                mini_batch, pre_transforms=self.pre_tfs)
            for data in mini_batch:
                self.check_input_keys(data)
            mini_batch = self._run(batch_input=mini_batch)
            for data in mini_batch:
                self.check_output_keys(data)
            mini_batch = self._postprocess(
                mini_batch, post_transforms=self.post_tfs)
            output.extend(mini_batch)
        # Mirror the input shape: a single dict in, a single dict out.
        if isinstance(orig_input, dict):
            return output[0]
        else:
            return output
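
    # Usage sketch (illustrative; the required input keys come from the
    # concrete predictor's first pre-transform, `'input_path'` is merely a
    # common case):
    #
    #     result = predictor.predict({'input_path': 'a.jpg'})    # -> dict
    #     results = predictor.predict(
    #         [{'input_path': 'a.jpg'}, {'input_path': 'b.jpg'}],
    #         batch_size=2)                                      # -> list of dicts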

    @abstractmethod
    def _run(self, batch_input):
        """ run inference on a preprocessed mini-batch """
        raise NotImplementedError

    @abstractmethod
    def _get_pre_transforms_from_config(self):
        """ get preprocess transforms """
        raise NotImplementedError

    @abstractmethod
    def _get_post_transforms_from_config(self):
        """ get postprocess transforms """
        raise NotImplementedError

    @batchable_method
    def _preprocess(self, data, pre_transforms):
        """ preprocess """
        for tf in pre_transforms:
            data = tf(data)
        return data

    @batchable_method
    def _postprocess(self, data, post_transforms):
        """ postprocess """
        for tf in post_transforms:
            data = tf(data)
        return data

    def _format_transforms(self, transforms):
        """ format transforms """
        ops_str = ", ".join(str(tf) for tf in transforms)
        return f"[{ops_str}]"

    def load_other_src(self):
        """ load other source """
        return None

    def get_input_keys(self):
        """ get keys of input dict """
        return self.pre_tfs[0].get_input_keys()
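

# A minimal sketch of a concrete predictor (hypothetical; the class and
# transform names below are illustrative, and how a subclass registers itself
# for `BasePredictor.get` is handled elsewhere in the framework):
#
#     class MyClsPredictor(BasePredictor):
#         def _run(self, batch_input):
#             images = [data['image'] for data in batch_input]
#             preds = self._predictor.predict(images)  # assumed helper API
#             for data, pred in zip(batch_input, preds):
#                 data['cls_pred'] = pred
#             return batch_input
#
#         def _get_pre_transforms_from_config(self):
#             return [ReadImage(), NormalizeImage(), ToCHWImage()]
#
#         def _get_post_transforms_from_config(self):
#             return [Topk(topk=5)]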


class PredictorBuilderByConfig(object):
    """ build model predictor """

    def __init__(self, config):
        """
        Args:
            config (AttrDict): PaddleX pipeline config, which is loaded from
                the pipeline yaml file.
        """
        model_name = config.Global.model
        device = config.Global.device
        predict_config = deepcopy(config.Predict)
        model_dir = predict_config.pop('model_dir')
        # The device from the `Global` section applies unless `kernel_option`
        # sets one explicitly.
        kernel_setting = predict_config.pop('kernel_option', {})
        kernel_setting.setdefault('device', device)
        kernel_option = PaddleInferenceOption(**kernel_setting)
        self.input_path = predict_config.pop('input_path')
        # Any remaining `Predict` entries are forwarded to the predictor.
        self.predictor = BasePredictor.get(model_name)(
            model_dir=model_dir,
            kernel_option=kernel_option,
            output=config.Global.output,
            **predict_config)

    def predict(self):
        """ predict """
        self.predictor.predict({'input_path': self.input_path})
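
# Expected config shape, inferred from the attribute accesses above (any keys
# under `Predict` beyond those popped here depend on the concrete predictor):
#
#     Global:
#       model: <model name>
#       device: <device, e.g. gpu or cpu>
#       output: <output directory>
#     Predict:
#       model_dir: <path to the inference model>
#       input_path: <path to the input file>
#       kernel_option: {...}  # optional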


def build_predictor(*args, **kwargs):
    """ build predictor by config for dev """
    return PredictorBuilderByConfig(*args, **kwargs)


def create_model(model_name,
                 model_dir=None,
                 kernel_option=None,
                 output="./",
                 pre_transforms=None,
                 post_transforms=None,
                 *args,
                 **kwargs):
    """ create model for predicting using inference model """
    if kernel_option is None:
        kernel_option = PaddleInferenceOption()
    if model_dir is None:
        if model_name in official_models:
            # Fall back to the official model of the same name.
            model_dir = official_models[model_name]
        else:
            # Not an official model: look the name up anyway so that an
            # invalid model name raises a meaningful error here.
            BasePredictor.get(model_name)
    return BasePredictor.get(model_name)(model_dir=model_dir,
                                         kernel_option=kernel_option,
                                         output=output,
                                         pre_transforms=pre_transforms,
                                         post_transforms=post_transforms,
                                         *args,
                                         **kwargs)
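

# Usage sketch (the model name and input path are illustrative; any name
# present in `official_models` works without an explicit `model_dir`):
#
#     model = create_model('PP-LCNet_x1_0')
#     result = model.predict({'input_path': 'path/to/image.jpg'}, batch_size=1)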