# rec_resnet_45.py
  1. # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. """
  15. This code is refer from:
  16. https://github.com/FangShancheng/ABINet/tree/main/modules
  17. """
  18. from __future__ import absolute_import
  19. from __future__ import division
  20. from __future__ import print_function
  21. import paddle
  22. from paddle import ParamAttr
  23. from paddle.nn.initializer import KaimingNormal
  24. import paddle.nn as nn
  25. import paddle.nn.functional as F
  26. import numpy as np
  27. import math
  28. __all__ = ["ResNet45"]
  29. def conv1x1(in_planes, out_planes, stride=1):
  30. return nn.Conv2D(
  31. in_planes,
  32. out_planes,
  33. kernel_size=1,
  34. stride=1,
  35. weight_attr=ParamAttr(initializer=KaimingNormal()),
  36. bias_attr=False)
  37. def conv3x3(in_channel, out_channel, stride=1):
  38. return nn.Conv2D(
  39. in_channel,
  40. out_channel,
  41. kernel_size=3,
  42. stride=stride,
  43. padding=1,
  44. weight_attr=ParamAttr(initializer=KaimingNormal()),
  45. bias_attr=False)
  46. class BasicBlock(nn.Layer):
  47. expansion = 1
  48. def __init__(self, in_channels, channels, stride=1, downsample=None):
  49. super().__init__()
  50. self.conv1 = conv1x1(in_channels, channels)
  51. self.bn1 = nn.BatchNorm2D(channels)
  52. self.relu = nn.ReLU()
  53. self.conv2 = conv3x3(channels, channels, stride)
  54. self.bn2 = nn.BatchNorm2D(channels)
  55. self.downsample = downsample
  56. self.stride = stride
  57. def forward(self, x):
  58. residual = x
  59. out = self.conv1(x)
  60. out = self.bn1(out)
  61. out = self.relu(out)
  62. out = self.conv2(out)
  63. out = self.bn2(out)
  64. if self.downsample is not None:
  65. residual = self.downsample(x)
  66. out += residual
  67. out = self.relu(out)
  68. return out
  69. class ResNet45(nn.Layer):
  70. def __init__(self,
  71. in_channels=3,
  72. block=BasicBlock,
  73. layers=[3, 4, 6, 6, 3],
  74. strides=[2, 1, 2, 1, 1]):
  75. self.inplanes = 32
  76. super(ResNet45, self).__init__()
  77. self.conv1 = nn.Conv2D(
  78. in_channels,
  79. 32,
  80. kernel_size=3,
  81. stride=1,
  82. padding=1,
  83. weight_attr=ParamAttr(initializer=KaimingNormal()),
  84. bias_attr=False)
  85. self.bn1 = nn.BatchNorm2D(32)
  86. self.relu = nn.ReLU()
  87. self.layer1 = self._make_layer(block, 32, layers[0], stride=strides[0])
  88. self.layer2 = self._make_layer(block, 64, layers[1], stride=strides[1])
  89. self.layer3 = self._make_layer(block, 128, layers[2], stride=strides[2])
  90. self.layer4 = self._make_layer(block, 256, layers[3], stride=strides[3])
  91. self.layer5 = self._make_layer(block, 512, layers[4], stride=strides[4])
  92. self.out_channels = 512
  93. def _make_layer(self, block, planes, blocks, stride=1):
  94. downsample = None
  95. if stride != 1 or self.inplanes != planes * block.expansion:
  96. # downsample = True
  97. downsample = nn.Sequential(
  98. nn.Conv2D(
  99. self.inplanes,
  100. planes * block.expansion,
  101. kernel_size=1,
  102. stride=stride,
  103. weight_attr=ParamAttr(initializer=KaimingNormal()),
  104. bias_attr=False),
  105. nn.BatchNorm2D(planes * block.expansion), )
  106. layers = []
  107. layers.append(block(self.inplanes, planes, stride, downsample))
  108. self.inplanes = planes * block.expansion
  109. for i in range(1, blocks):
  110. layers.append(block(self.inplanes, planes))
  111. return nn.Sequential(*layers)
  112. def forward(self, x):
  113. x = self.conv1(x)
  114. x = self.bn1(x)
  115. x = self.relu(x)
  116. x = self.layer1(x)
  117. x = self.layer2(x)
  118. x = self.layer3(x)
  119. x = self.layer4(x)
  120. x = self.layer5(x)
  121. return x