rec_densenet.py

# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
  14. """
  15. This code is refer from:
  16. https://github.com/LBH1024/CAN/models/densenet.py
  17. """
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

import paddle
import paddle.nn as nn
import paddle.nn.functional as F


class Bottleneck(nn.Layer):
    """DenseNet bottleneck layer: a 1x1 conv to 4 * growthRate channels,
    then a 3x3 conv down to growthRate channels, concatenated with the
    input along the channel axis."""

    def __init__(self, nChannels, growthRate, use_dropout):
        super(Bottleneck, self).__init__()
        interChannels = 4 * growthRate
        self.bn1 = nn.BatchNorm2D(interChannels)
        self.conv1 = nn.Conv2D(
            nChannels, interChannels, kernel_size=1,
            bias_attr=None)  # Xavier initialization
        self.bn2 = nn.BatchNorm2D(growthRate)
        self.conv2 = nn.Conv2D(
            interChannels, growthRate, kernel_size=3, padding=1,
            bias_attr=None)  # Xavier initialization
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        if self.use_dropout:
            out = self.dropout(out)
        out = F.relu(self.bn2(self.conv2(out)))
        if self.use_dropout:
            out = self.dropout(out)
        out = paddle.concat([x, out], 1)
        return out
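

# Illustrative channel bookkeeping for Bottleneck (the concrete values are
# assumed for illustration, not taken from this file): with nChannels=48 and
# growthRate=24, an input of shape [N, 48, H, W] goes through the 1x1 conv to
# [N, 96, H, W], through the 3x3 conv to [N, 24, H, W], and the final concat
# yields [N, 72, H, W]. Each layer therefore widens the feature map by
# exactly growthRate channels while leaving H and W unchanged.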


class SingleLayer(nn.Layer):
    """Plain (non-bottleneck) DenseNet layer: a single 3x3 conv producing
    growthRate channels, concatenated with the input."""

    def __init__(self, nChannels, growthRate, use_dropout):
        super(SingleLayer, self).__init__()
        # bn1 is defined but never applied in forward, matching the
        # reference CAN implementation.
        self.bn1 = nn.BatchNorm2D(nChannels)
        self.conv1 = nn.Conv2D(
            nChannels, growthRate, kernel_size=3, padding=1, bias_attr=False)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        out = self.conv1(F.relu(x))
        if self.use_dropout:
            out = self.dropout(out)
        out = paddle.concat([x, out], 1)
        return out


class Transition(nn.Layer):
    """Transition between dense blocks: a 1x1 conv compresses the channel
    count, then 2x2 average pooling halves H and W."""

    def __init__(self, nChannels, out_channels, use_dropout):
        super(Transition, self).__init__()
        self.bn1 = nn.BatchNorm2D(out_channels)
        self.conv1 = nn.Conv2D(
            nChannels, out_channels, kernel_size=1, bias_attr=False)
        self.use_dropout = use_dropout
        self.dropout = nn.Dropout(p=0.2)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        if self.use_dropout:
            out = self.dropout(out)
        out = F.avg_pool2d(out, 2, ceil_mode=True, exclusive=False)
        return out


class DenseNet(nn.Layer):
    """DenseNet backbone for the CAN recognizer: a stride-2 stem followed
    by three dense blocks of nDenseBlocks layers each, with channel-
    compressing transitions after the first two blocks."""

    def __init__(self, growthRate, reduction, bottleneck, use_dropout,
                 input_channel, **kwargs):
        super(DenseNet, self).__init__()
        nDenseBlocks = 16
        nChannels = 2 * growthRate
        self.conv1 = nn.Conv2D(
            input_channel,
            nChannels,
            kernel_size=7,
            padding=3,
            stride=2,
            bias_attr=False)
        self.dense1 = self._make_dense(nChannels, growthRate, nDenseBlocks,
                                       bottleneck, use_dropout)
        nChannels += nDenseBlocks * growthRate
        out_channels = int(math.floor(nChannels * reduction))
        self.trans1 = Transition(nChannels, out_channels, use_dropout)
        nChannels = out_channels
        self.dense2 = self._make_dense(nChannels, growthRate, nDenseBlocks,
                                       bottleneck, use_dropout)
        nChannels += nDenseBlocks * growthRate
        out_channels = int(math.floor(nChannels * reduction))
        self.trans2 = Transition(nChannels, out_channels, use_dropout)
        nChannels = out_channels
        self.dense3 = self._make_dense(nChannels, growthRate, nDenseBlocks,
                                       bottleneck, use_dropout)
        # dense3 has no trailing transition, so the backbone actually emits
        # nChannels + nDenseBlocks * growthRate feature maps; report that
        # rather than the compressed width left over from trans2.
        nChannels += nDenseBlocks * growthRate
        self.out_channels = nChannels

    def _make_dense(self, nChannels, growthRate, nDenseBlocks, bottleneck,
                    use_dropout):
        layers = []
        for i in range(int(nDenseBlocks)):
            if bottleneck:
                layers.append(Bottleneck(nChannels, growthRate, use_dropout))
            else:
                layers.append(SingleLayer(nChannels, growthRate, use_dropout))
            nChannels += growthRate
        return nn.Sequential(*layers)

    def forward(self, inputs):
        # inputs is a three-element tuple; x_m and y (presumably the image
        # mask and label from the CAN pipeline) are passed through unchanged
        # for the downstream head.
        x, x_m, y = inputs
        out = self.conv1(x)
        out = F.relu(out)
        out = F.max_pool2d(out, 2, ceil_mode=True)
        out = self.dense1(out)
        out = self.trans1(out)
        out = self.dense2(out)
        out = self.trans2(out)
        out = self.dense3(out)
        return out, x_m, y
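

# A minimal smoke test, assuming CAN-style hyperparameters (growthRate=24,
# reduction=0.5, bottleneck=True, use_dropout=True, input_channel=1). These
# values and the input sizes are illustrative, not taken from this file.
if __name__ == "__main__":
    model = DenseNet(
        growthRate=24,
        reduction=0.5,
        bottleneck=True,
        use_dropout=True,
        input_channel=1)
    x = paddle.randn([2, 1, 128, 128])  # dummy grayscale formula images
    x_m = paddle.ones([2, 1, 128, 128])  # dummy image mask, passed through
    y = paddle.zeros([2, 36], dtype="int64")  # dummy labels, passed through
    out, x_m, y = model((x, x_m, y))
    # The stride-2 stem, the max pool, and the two transition poolings each
    # halve H and W, so 128 x 128 inputs give 8 x 8 feature maps with
    # model.out_channels (684 for these hyperparameters) channels.
    print(out.shape, model.out_channels)  # [2, 684, 8, 8] 684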