# save_load.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import errno
import os
import re
import shutil
import tempfile

import paddle

from paddlex.ppcls.utils import logger

__all__ = ['init_model', 'save_model']


def _mkdir_if_not_exist(path):
    """
    mkdir if the path does not exist; ignore the exception raised when
    multiple processes mkdir the same path together
    """
    if not os.path.exists(path):
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno == errno.EEXIST and os.path.isdir(path):
                logger.warning(
                    'directory {} has already been created by another '
                    'process'.format(path))
            else:
                raise OSError('Failed to mkdir {}'.format(path))
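

# Note: a race is still possible between the os.path.exists check and
# os.makedirs when several training processes start at once, which is why
# the EEXIST branch above treats an existing directory as success; on
# Python 3, os.makedirs(path, exist_ok=True) would achieve the same effect.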


def _load_state(path):
    if os.path.exists(path + '.pdopt'):
        # XXX another hack to ignore the optimizer state
        tmp = tempfile.mkdtemp()
        dst = os.path.join(tmp, os.path.basename(os.path.normpath(path)))
        shutil.copy(path + '.pdparams', dst + '.pdparams')
        state = paddle.static.load_program_state(dst)
        shutil.rmtree(tmp)
    else:
        state = paddle.static.load_program_state(path)
    return state
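

# The temp-dir copy above exists because paddle.static.load_program_state
# also picks up '<path>.pdopt' when it sits next to '<path>.pdparams';
# copying only the .pdparams file into an empty directory therefore drops
# the optimizer state and loads the parameters alone.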


def load_params(exe, prog, path, ignore_params=None):
    """
    Load model from the given path.

    Args:
        exe (paddle.static.Executor): The paddle.static.Executor object.
        prog (paddle.static.Program): The Program object to load weights into.
        path (string): URL string or local model path.
        ignore_params (list): Variables to skip loading when finetuning.
            It can be specified by finetune_exclude_pretrained_params;
            for usage, refer to the document
            docs/advanced_tutorials/TRANSFER_LEARNING.md
    """
    if not (os.path.isdir(path) or os.path.exists(path + '.pdparams')):
        raise ValueError("Model pretrain path {} does not "
                         "exist.".format(path))

    logger.info("Loading parameters from {}...".format(path))

    ignore_set = set()
    state = _load_state(path)

    # Ignore parameters whose shapes differ between the model
    # and the pretrained weights.
    all_var_shape = {}
    for block in prog.blocks:
        for param in block.all_parameters():
            all_var_shape[param.name] = param.shape
    ignore_set.update([
        name for name, shape in all_var_shape.items()
        if name in state and shape != state[name].shape
    ])

    if ignore_params:
        all_var_names = [var.name for var in prog.list_vars()]
        ignore_list = filter(
            lambda var: any([re.match(name, var) for name in ignore_params]),
            all_var_names)
        ignore_set.update(list(ignore_list))

    if len(ignore_set) > 0:
        for k in ignore_set:
            if k in state:
                logger.warning(
                    'variable {} is excluded automatically'.format(k))
                del state[k]

    paddle.static.set_program_state(prog, state)
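

# A minimal usage sketch for load_params (the executor, program, and path
# below are illustrative, not part of this module): load pretrained backbone
# weights into a static-graph program while skipping the classifier head.
# Since entries in ignore_params are matched with re.match, each one is a
# regular expression anchored at the start of the variable name.
#
#     paddle.enable_static()
#     exe = paddle.static.Executor(paddle.CPUPlace())
#     prog = paddle.static.default_main_program()
#     load_params(exe, prog, './pretrained/ResNet50_pretrained',
#                 ignore_params=['fc_'])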


def init_model(config, program, exe):
    """
    Load the model from a checkpoint or a pretrained model.
    """
    checkpoints = config.get('checkpoints')
    if checkpoints:
        paddle.static.load(program, checkpoints, exe)
        logger.info("Finished initializing model from {}".format(checkpoints))
        return

    pretrained_model = config.get('pretrained_model')
    if pretrained_model:
        if not isinstance(pretrained_model, list):
            pretrained_model = [pretrained_model]
        for pretrain in pretrained_model:
            load_params(exe, program, pretrain)
        logger.info(
            "Finished initializing model from {}".format(pretrained_model))
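

# init_model reads two keys from a dict-like config, with 'checkpoints'
# taking priority over 'pretrained_model'. A hedged example (paths are
# illustrative):
#
#     config = {
#         'checkpoints': None,
#         'pretrained_model': './pretrained/ResNet50_pretrained',
#     }
#     init_model(config, paddle.static.default_main_program(), exe)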


def save_model(program, model_path, epoch_id, prefix='ppcls'):
    """
    Save the model to the target path.
    """
    if paddle.distributed.get_rank() != 0:
        return
    model_path = os.path.join(model_path, str(epoch_id))
    _mkdir_if_not_exist(model_path)
    model_prefix = os.path.join(model_path, prefix)
    paddle.static.save(program, model_prefix)
    logger.info("Saved model in {}".format(model_path))
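

# save_model only writes from rank 0, so it is safe to call from every
# worker under multi-GPU training. A minimal sketch (output path is
# illustrative); paddle.static.save stores the parameters, optimizer
# variables, and network description under the given prefix:
#
#     save_model(paddle.static.default_main_program(), './output', epoch_id=0)
#     # -> ./output/0/ppcls.pdparams, ./output/0/ppcls.pdopt, ...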