darknet.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle import ParamAttr
from paddle.regularizer import L2Decay

from paddlex.ppdet.core.workspace import register, serializable
from paddlex.ppdet.modeling.ops import batch_norm, mish
from ..shape_spec import ShapeSpec

__all__ = ['DarkNet', 'ConvBNLayer']


class ConvBNLayer(nn.Layer):
    def __init__(self,
                 ch_in,
                 ch_out,
                 filter_size=3,
                 stride=1,
                 groups=1,
                 padding=0,
                 norm_type='bn',
                 norm_decay=0.,
                 act="leaky",
                 freeze_norm=False,
                 data_format='NCHW',
                 name=''):
        """
        conv + bn + activation layer

        Args:
            ch_in (int): input channel
            ch_out (int): output channel
            filter_size (int): filter size, default 3
            stride (int): stride, default 1
            groups (int): number of groups of conv layer, default 1
            padding (int): padding size, default 0
            norm_type (str): batch norm type, default bn
            norm_decay (float): decay for weight and bias of batch norm layer, default 0.
            act (str): activation function type, default 'leaky', which means leaky_relu
            freeze_norm (bool): whether to freeze norm, default False
            data_format (str): data format, NCHW or NHWC
        """
        super(ConvBNLayer, self).__init__()

        self.conv = nn.Conv2D(
            in_channels=ch_in,
            out_channels=ch_out,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=groups,
            data_format=data_format,
            bias_attr=False)
        self.batch_norm = batch_norm(
            ch_out,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)
        self.act = act

    def forward(self, inputs):
        out = self.conv(inputs)
        out = self.batch_norm(out)
        if self.act == 'leaky':
            out = F.leaky_relu(out, 0.1)
        elif self.act == 'mish':
            out = mish(out)
        return out
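
# Note: ConvBNLayer deliberately omits the convolution bias (bias_attr=False)
# because the batch norm that follows supplies the affine offset; any `act`
# value other than 'leaky' or 'mish' falls through with no activation applied.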


class DownSample(nn.Layer):
    def __init__(self,
                 ch_in,
                 ch_out,
                 filter_size=3,
                 stride=2,
                 padding=1,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=False,
                 data_format='NCHW'):
        """
        downsample layer

        Args:
            ch_in (int): input channel
            ch_out (int): output channel
            filter_size (int): filter size, default 3
            stride (int): stride, default 2
            padding (int): padding size, default 1
            norm_type (str): batch norm type, default bn
            norm_decay (float): decay for weight and bias of batch norm layer, default 0.
            freeze_norm (bool): whether to freeze norm, default False
            data_format (str): data format, NCHW or NHWC
        """
        super(DownSample, self).__init__()

        self.conv_bn_layer = ConvBNLayer(
            ch_in=ch_in,
            ch_out=ch_out,
            filter_size=filter_size,
            stride=stride,
            padding=padding,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)
        self.ch_out = ch_out

    def forward(self, inputs):
        out = self.conv_bn_layer(inputs)
        return out
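
# Note: DownSample is simply a stride-2 ConvBNLayer (3x3, padding 1 by
# default), so each call halves the spatial resolution; DarkNet below also
# doubles the channel count at every downsample step.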


class BasicBlock(nn.Layer):
    def __init__(self,
                 ch_in,
                 ch_out,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=False,
                 data_format='NCHW'):
        """
        BasicBlock layer of DarkNet

        Args:
            ch_in (int): input channel
            ch_out (int): output channel
            norm_type (str): batch norm type, default bn
            norm_decay (float): decay for weight and bias of batch norm layer, default 0.
            freeze_norm (bool): whether to freeze norm, default False
            data_format (str): data format, NCHW or NHWC
        """
        super(BasicBlock, self).__init__()

        self.conv1 = ConvBNLayer(
            ch_in=ch_in,
            ch_out=ch_out,
            filter_size=1,
            stride=1,
            padding=0,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)
        self.conv2 = ConvBNLayer(
            ch_in=ch_out,
            ch_out=ch_out * 2,
            filter_size=3,
            stride=1,
            padding=1,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)

    def forward(self, inputs):
        conv1 = self.conv1(inputs)
        conv2 = self.conv2(conv1)
        out = paddle.add(x=inputs, y=conv2)
        return out
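
# Note: the residual add in BasicBlock.forward requires ch_in == ch_out * 2,
# since conv2 brings the channel count back up to ch_out * 2 before the skip
# connection; Blocks below wires its BasicBlocks to satisfy this.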


class Blocks(nn.Layer):
    def __init__(self,
                 ch_in,
                 ch_out,
                 count,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=False,
                 name=None,
                 data_format='NCHW'):
        """
        Blocks layer, which consists of a stack of BasicBlock layers

        Args:
            ch_in (int): input channel
            ch_out (int): output channel
            count (int): number of BasicBlock layers
            norm_type (str): batch norm type, default bn
            norm_decay (float): decay for weight and bias of batch norm layer, default 0.
            freeze_norm (bool): whether to freeze norm, default False
            name (str): layer name
            data_format (str): data format, NCHW or NHWC
        """
        super(Blocks, self).__init__()

        self.basicblock0 = BasicBlock(
            ch_in,
            ch_out,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)
        self.res_out_list = []
        for i in range(1, count):
            block_name = '{}.{}'.format(name, i)
            res_out = self.add_sublayer(
                block_name,
                BasicBlock(
                    ch_out * 2,
                    ch_out,
                    norm_type=norm_type,
                    norm_decay=norm_decay,
                    freeze_norm=freeze_norm,
                    data_format=data_format))
            self.res_out_list.append(res_out)
        self.ch_out = ch_out

    def forward(self, inputs):
        y = self.basicblock0(inputs)
        for basic_block_i in self.res_out_list:
            y = basic_block_i(y)
        return y


# DarkNet_cfg maps network depth to the per-stage BasicBlock counts; the
# DarkNet-53 configuration stacks 1, 2, 8, 8 and 4 blocks over five stages.
DarkNet_cfg = {53: ([1, 2, 8, 8, 4])}


@register
@serializable
class DarkNet(nn.Layer):
    __shared__ = ['norm_type', 'data_format']

    def __init__(self,
                 depth=53,
                 freeze_at=-1,
                 return_idx=[2, 3, 4],
                 num_stages=5,
                 norm_type='bn',
                 norm_decay=0.,
                 freeze_norm=False,
                 data_format='NCHW'):
        """
        Darknet, see https://pjreddie.com/darknet/yolo/

        Args:
            depth (int): depth of network
            freeze_at (int): freeze the backbone at which stage
            return_idx (list): index of stages whose feature maps are returned
            num_stages (int): number of stages, default 5
            norm_type (str): batch norm type, default bn
            norm_decay (float): decay for weight and bias of batch norm layer, default 0.
            freeze_norm (bool): whether to freeze norm, default False
            data_format (str): data format, NCHW or NHWC
        """
        super(DarkNet, self).__init__()
        self.depth = depth
        self.freeze_at = freeze_at
        self.return_idx = return_idx
        self.num_stages = num_stages
        self.stages = DarkNet_cfg[self.depth][0:num_stages]

        self.conv0 = ConvBNLayer(
            ch_in=3,
            ch_out=32,
            filter_size=3,
            stride=1,
            padding=1,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)

        self.downsample0 = DownSample(
            ch_in=32,
            ch_out=32 * 2,
            norm_type=norm_type,
            norm_decay=norm_decay,
            freeze_norm=freeze_norm,
            data_format=data_format)

        self._out_channels = []
        self.darknet_conv_block_list = []
        self.downsample_list = []
        ch_in = [64, 128, 256, 512, 1024]
        for i, stage in enumerate(self.stages):
            name = 'stage.{}'.format(i)
            conv_block = self.add_sublayer(
                name,
                Blocks(
                    int(ch_in[i]),
                    32 * (2**i),
                    stage,
                    norm_type=norm_type,
                    norm_decay=norm_decay,
                    freeze_norm=freeze_norm,
                    data_format=data_format,
                    name=name))
            self.darknet_conv_block_list.append(conv_block)
            if i in return_idx:
                self._out_channels.append(64 * (2**i))
        for i in range(num_stages - 1):
            down_name = 'stage.{}.downsample'.format(i)
            downsample = self.add_sublayer(
                down_name,
                DownSample(
                    ch_in=32 * (2**(i + 1)),
                    ch_out=32 * (2**(i + 2)),
                    norm_type=norm_type,
                    norm_decay=norm_decay,
                    freeze_norm=freeze_norm,
                    data_format=data_format))
            self.downsample_list.append(downsample)

    def forward(self, inputs):
        x = inputs['image']

        out = self.conv0(x)
        out = self.downsample0(out)
        blocks = []
        for i, conv_block_i in enumerate(self.darknet_conv_block_list):
            out = conv_block_i(out)
            if i == self.freeze_at:
                out.stop_gradient = True
            if i in self.return_idx:
                blocks.append(out)
            if i < self.num_stages - 1:
                out = self.downsample_list[i](out)
        return blocks

    @property
    def out_shape(self):
        return [ShapeSpec(channels=c) for c in self._out_channels]
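

if __name__ == '__main__':
    # Illustrative usage sketch, not part of the original module; it assumes a
    # working paddle/paddlex install. Builds the DarkNet-53 backbone and prints
    # the shapes of the returned feature maps, which for a 416x416 input are
    # 52x52/256ch, 26x26/512ch and 13x13/1024ch (stages 2, 3 and 4).
    model = DarkNet(depth=53, return_idx=[2, 3, 4])
    dummy = paddle.rand([1, 3, 416, 416])
    feats = model({'image': dummy})
    for feat, spec in zip(feats, model.out_shape):
        print(feat.shape, spec.channels)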