#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
################################################################################
#
# Copyright (c) 2024 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
Author: PaddlePaddle Authors
"""
from ...base import BaseConfig
from ....utils.misc import abspath
from ....utils import logging
from ..config_helper import PPDetConfigMixin


class DetConfig(BaseConfig, PPDetConfigMixin):
    """ DetConfig """

    def load(self, config_path: str):
        """load the config from config file

        Args:
            config_path (str): the config file path.
        """
        dict_ = self.load_config_literally(config_path)
        self.reset_from_dict(dict_)

    def dump(self, config_path: str):
        """dump the config

        Args:
            config_path (str): the path to save the dumped config.
        """
        self.dump_literal_config(config_path, self._dict)

    def update(self, dict_like_obj: dict):
        """update self from a dict

        Args:
            dict_like_obj (dict): the dict of key-value pairs used to update the config.
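
        Example (illustrative; `config` stands for a loaded DetConfig instance):
            config.update({'epoch': 50, 'log_iter': 20})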
        """
        self.update_from_dict(dict_like_obj, self._dict)

    def update_dataset(self,
                       dataset_path: str,
                       dataset_type: str=None,
                       *,
                       data_fields: list[str]=None,
                       image_dir: str="images",
                       train_anno_path: str="annotations/instance_train.json",
                       val_anno_path: str="annotations/instance_val.json",
                       test_anno_path: str="annotations/instance_val.json"):
        """update dataset settings

        Args:
            dataset_path (str): the root path of the dataset.
            dataset_type (str, optional): the dataset type. Defaults to None.
            data_fields (list[str], optional): the data fields in the dataset. Defaults to None.
            image_dir (str, optional): the image directory, relative to `dataset_path`. Defaults to "images".
            train_anno_path (str, optional): the train annotation file, relative to `dataset_path`.
                Defaults to "annotations/instance_train.json".
            val_anno_path (str, optional): the validation annotation file, relative to `dataset_path`.
                Defaults to "annotations/instance_val.json".
            test_anno_path (str, optional): the test annotation file, relative to `dataset_path`.
                Defaults to "annotations/instance_val.json".

        Raises:
            ValueError: when `dataset_type` is not supported.
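
        Example (illustrative; assumes a COCO-format dataset under `./coco_data`
        with the default `images/` and `annotations/` layout):
            config.update_dataset('./coco_data', 'COCODetDataset')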
        """
        dataset_path = abspath(dataset_path)
        if dataset_type is None:
            dataset_type = 'COCODetDataset'
        if dataset_type == 'COCODetDataset':
            ds_cfg = self._make_dataset_config(dataset_path, data_fields,
                                               image_dir, train_anno_path,
                                               val_anno_path, test_anno_path)
            self.set_val('metric', 'COCO')
        else:
            raise ValueError(f"{repr(dataset_type)} is not supported.")
        self.update(ds_cfg)

    def _make_dataset_config(
            self,
            dataset_root_path: str,
            data_fields: list[str]=None,
            image_dir: str="images",
            train_anno_path: str="annotations/instance_train.json",
            val_anno_path: str="annotations/instance_val.json",
            test_anno_path: str="annotations/instance_val.json") -> dict:
        """construct a dataset config that meets the format requirements

        Args:
            dataset_root_path (str): the root directory of the dataset.
            data_fields (list[str], optional): the data fields. Defaults to None.
            image_dir (str, optional): the image directory, relative to `dataset_root_path`. Defaults to "images".
            train_anno_path (str, optional): the train annotation file, relative to `dataset_root_path`.
                Defaults to "annotations/instance_train.json".
            val_anno_path (str, optional): the validation annotation file, relative to `dataset_root_path`.
                Defaults to "annotations/instance_val.json".
            test_anno_path (str, optional): the test annotation file, relative to `dataset_root_path`.
                Defaults to "annotations/instance_val.json".

        Returns:
            dict: the dataset config.
        """
        data_fields = ['image', 'gt_bbox', 'gt_class',
                       'is_crowd'] if data_fields is None else data_fields
        return {
            'TrainDataset': {
                'name': 'COCODetDataset',
                'image_dir': image_dir,
                'anno_path': train_anno_path,
                'dataset_dir': dataset_root_path,
                'data_fields': data_fields
            },
            'EvalDataset': {
                'name': 'COCODetDataset',
                'image_dir': image_dir,
                'anno_path': val_anno_path,
                'dataset_dir': dataset_root_path
            },
            'TestDataset': {
                'name': 'ImageFolder',
                'anno_path': test_anno_path,
                'dataset_dir': dataset_root_path
            },
        }

    def update_ema(self,
                   use_ema: bool,
                   ema_decay: float=0.9999,
                   ema_decay_type: str="exponential",
                   ema_filter_no_grad: bool=True):
        """update EMA settings

        Args:
            use_ema (bool): whether or not to use EMA.
            ema_decay (float, optional): the EMA decay value. Defaults to 0.9999.
            ema_decay_type (str, optional): the EMA decay type. Defaults to "exponential".
            ema_filter_no_grad (bool, optional): whether or not to filter out the parameters that
                have been set to stop gradient and are not batch norm parameters. Defaults to True.
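
        Example (illustrative; enables EMA with a slightly lower decay):
            config.update_ema(True, ema_decay=0.9998)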
        """
        self.update({
            'use_ema': use_ema,
            'ema_decay': ema_decay,
            'ema_decay_type': ema_decay_type,
            'ema_filter_no_grad': ema_filter_no_grad
        })

    def update_learning_rate(self, learning_rate: float):
        """update learning rate

        Args:
            learning_rate (float): the learning rate value to set.
        """
        self.LearningRate['base_lr'] = learning_rate

    def update_warmup_steps(self, warmup_steps: int):
        """update warmup steps

        Args:
            warmup_steps (int): the warmup steps value to set.
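
        Example (illustrative; assumes `LearningRate.schedulers` contains a
        `LinearWarmup` entry, e.g. `{'name': 'LinearWarmup', 'steps': 100}`):
            config.update_warmup_steps(500)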
        """
        schedulers = self.LearningRate['schedulers']
        for sch in schedulers:
            key = 'name' if 'name' in sch else '_type_'
            if sch[key] == 'LinearWarmup':
                sch['steps'] = warmup_steps
                sch['epochs_first'] = False

    def update_warmup_enable(self, use_warmup: bool):
        """enable or disable learning rate warmup

        Args:
            use_warmup (bool): `True` enables learning rate warmup; `False` disables it.
        """
        schedulers = self.LearningRate['schedulers']
        for sch in schedulers:
            if 'use_warmup' in sch:
                sch['use_warmup'] = use_warmup

    def update_cossch_epoch(self, max_epochs: int):
        """update max epochs of the cosine learning rate scheduler

        Args:
            max_epochs (int): the max epochs value.
        """
        schedulers = self.LearningRate['schedulers']
        for sch in schedulers:
            key = 'name' if 'name' in sch else '_type_'
            if sch[key] == 'CosineDecay':
                sch['max_epochs'] = max_epochs

    def update_milestone(self, milestones: list[int]):
        """update milestones of the `PiecewiseDecay` learning rate scheduler

        Args:
            milestones (list[int]): the list of milestone values for the `PiecewiseDecay` scheduler.
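
        Example (illustrative; assumes a `PiecewiseDecay` scheduler is configured):
            config.update_milestone([216, 243])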
        """
        schedulers = self.LearningRate['schedulers']
        for sch in schedulers:
            key = 'name' if 'name' in sch else '_type_'
            if sch[key] == 'PiecewiseDecay':
                sch['milestones'] = milestones

    def update_batch_size(self, batch_size: int, mode: str='train'):
        """update batch size setting

        Args:
            batch_size (int): the batch size value to set.
            mode (str, optional): the mode whose batch size is set; must be one of 'train', 'eval', 'test'.
                Defaults to 'train'.

        Raises:
            AssertionError: when `mode` is not one of 'train', 'eval', 'test'.
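
        Example (illustrative):
            config.update_batch_size(16)               # train reader
            config.update_batch_size(8, mode='eval')   # eval reader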
        """
        assert mode in ('train', 'eval', 'test'), \
            'mode ({}) should be train, eval or test'.format(mode)
        if mode == 'train':
            self.TrainReader['batch_size'] = batch_size
        elif mode == 'eval':
            self.EvalReader['batch_size'] = batch_size
        else:
            self.TestReader['batch_size'] = batch_size

    def update_epochs(self, epochs: int):
        """update epochs setting

        Args:
            epochs (int): the number of epochs to set.
        """
        self.update({'epoch': epochs})

    def update_device(self, device_type: str):
        """update device setting

        Args:
            device_type (str): the running device to set.
        """
        if device_type.lower() == "gpu":
            self['use_gpu'] = True
        else:
            assert device_type.lower() == "cpu"
            self['use_gpu'] = False

    def update_save_dir(self, save_dir: str):
        """update directory to save outputs

        Args:
            save_dir (str): the directory to save outputs.
        """
        self['save_dir'] = abspath(save_dir)

    def update_log_interval(self, log_interval: int):
        """update log interval (steps)

        Args:
            log_interval (int): the log interval value to set.
        """
        self.update({'log_iter': log_interval})

    def update_eval_interval(self, eval_interval: int):
        """update eval interval (epochs)

        Args:
            eval_interval (int): the eval interval value to set.
        """
        self.update({'snapshot_epoch': eval_interval})

    def update_save_interval(self, save_interval: int):
        """update save interval (epochs)

        Args:
            save_interval (int): the save interval value to set.
        """
        self.update({'snapshot_epoch': save_interval})

    def update_weights(self, weight_path: str):
        """update model weight path

        Args:
            weight_path (str): the path to the model weight file.
        """
        self['weights'] = abspath(weight_path)

    def update_pretrained_weights(self, pretrain_weights: str):
        """update pretrained weights path

        Args:
            pretrain_weights (str): the local path or URL of the pretrained weights file to set.
        """
        if not pretrain_weights.startswith(
                'http://') and not pretrain_weights.startswith('https://'):
            pretrain_weights = abspath(pretrain_weights)
        self['pretrain_weights'] = pretrain_weights

    def update_num_class(self, num_classes: int):
        """update the number of classes

        Args:
            num_classes (int): the number of classes to set.
        """
        self['num_classes'] = num_classes

    def update_random_size(self, randomsize: list[list[int]]):
        """update `target_size` of the `BatchRandomResize` op in TestReader

        Args:
            randomsize (list[list[int]]): the list of different size scales.
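
        Example (illustrative; assumes TestReader's batch transforms include `BatchRandomResize`):
            config.update_random_size([[576, 576], [608, 608], [640, 640]])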
        """
        self.TestReader['batch_transforms']['BatchRandomResize'][
            'target_size'] = randomsize

    def update_num_workers(self, num_workers: int):
        """update the number of workers for the train and eval dataloaders

        Args:
            num_workers (int): the number of dataloader workers to set.
        """
        self['worker_num'] = num_workers

    def enable_shared_memory(self):
        """enable shared memory for the train and eval dataloaders"""
        self.update({'TrainReader': {'use_shared_memory': True}})
        self.update({'EvalReader': {'use_shared_memory': True}})

    def disable_shared_memory(self):
        """disable shared memory for the train and eval dataloaders"""
        self.update({'TrainReader': {'use_shared_memory': False}})
        self.update({'EvalReader': {'use_shared_memory': False}})

    def _recursively_set(self, config: dict, update_dict: dict):
        """recursively set config values

        Args:
            config (dict): the original config.
            update_dict (dict): the parameters to update and their values.

        Example:
            self._recursively_set(self.HybridEncoder, {'encoder_layer': {'dim_feedforward': 2048}})
        """
        assert isinstance(update_dict, dict)
        for key in update_dict:
            if key not in config:
                logging.info(
                    f'A new config field to set was found: {repr(key)}.')
                config[key] = update_dict[key]
            elif not isinstance(update_dict[key], dict):
                config[key] = update_dict[key]
            else:
                self._recursively_set(config[key], update_dict[key])

    def update_static_assigner_epochs(self, static_assigner_epochs: int):
        """update static assigner epochs value

        Args:
            static_assigner_epochs (int): the value of static assigner epochs.
        """
        assert 'PicoHeadV2' in self
        self.PicoHeadV2['static_assigner_epoch'] = static_assigner_epochs

    def update_HybridEncoder(self, update_dict: dict):
        """update the HybridEncoder neck setting

        Args:
            update_dict (dict): the HybridEncoder setting.
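
        Example (illustrative; assumes the model config defines a `HybridEncoder` neck):
            config.update_HybridEncoder({'encoder_layer': {'dim_feedforward': 2048}})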
        """
        assert 'HybridEncoder' in self
        self._recursively_set(self.HybridEncoder, update_dict)

    def get_epochs_iters(self) -> int:
        """get epochs

        Returns:
            int: the epochs value, i.e., `epoch` in the config.
        """
        return self.epoch

    def get_log_interval(self) -> int:
        """get log interval (steps)

        Returns:
            int: the log interval value, i.e., `log_iter` in the config.
        """
        return self.log_iter

    def get_eval_interval(self) -> int:
        """get eval interval (epochs)

        Returns:
            int: the eval interval value, i.e., `snapshot_epoch` in the config.
        """
        return self.snapshot_epoch

    def get_save_interval(self) -> int:
        """get save interval (epochs)

        Returns:
            int: the save interval value, i.e., `snapshot_epoch` in the config.
        """
        return self.snapshot_epoch

    def get_learning_rate(self) -> float:
        """get learning rate

        Returns:
            float: the learning rate value, i.e., `LearningRate.base_lr` in the config.
        """
        return self.LearningRate['base_lr']

    def get_batch_size(self, mode='train') -> int:
        """get batch size

        Args:
            mode (str, optional): the mode whose batch size is returned; must be one of 'train',
                'eval', 'test'. Defaults to 'train'.

        Returns:
            int: the batch size value of `mode`, i.e., `{mode}Reader.batch_size` in the config.

        Raises:
            ValueError: when `mode` is not one of 'train', 'eval', 'test'.
        """
        if mode == 'train':
            return self.TrainReader['batch_size']
        elif mode == 'eval':
            return self.EvalReader['batch_size']
        elif mode == 'test':
            return self.TestReader['batch_size']
        else:
            raise ValueError(f"Unknown mode: {repr(mode)}")

    def get_qat_epochs_iters(self) -> int:
        """get QAT epochs, i.e., half of the configured epochs

        Returns:
            int: the epochs value.
        """
        return self.epoch // 2

    def get_qat_learning_rate(self) -> float:
        """get QAT learning rate, i.e., half of the configured learning rate

        Returns:
            float: the learning rate value.
        """
        # halve the base learning rate for QAT fine-tuning
        return self.LearningRate['base_lr'] / 2.0

    def get_train_save_dir(self) -> str:
        """get the directory to save output

        Returns:
            str: the directory to save output.
        """
        return self.save_dir