det_ct_head.py

# copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

import paddle
from paddle import nn
import paddle.nn.functional as F
from paddle import ParamAttr
from paddle.nn.initializer import TruncatedNormal, Constant, Normal

ones_ = Constant(value=1.)
zeros_ = Constant(value=0.)


class CT_Head(nn.Layer):
    def __init__(self,
                 in_channels,
                 hidden_dim,
                 num_classes,
                 loss_kernel=None,
                 loss_loc=None):
        super(CT_Head, self).__init__()
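        # 3x3 conv + BN + ReLU projects the input feature map to hidden_dim channels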
        self.conv1 = nn.Conv2D(
            in_channels, hidden_dim, kernel_size=3, stride=1, padding=1)
        self.bn1 = nn.BatchNorm2D(hidden_dim)
        self.relu1 = nn.ReLU()
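
        # 1x1 conv produces the prediction maps, one output channel per class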
        self.conv2 = nn.Conv2D(
            hidden_dim, num_classes, kernel_size=1, stride=1, padding=0)
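
        # He/Kaiming-style normal init for conv weights; BatchNorm weights to 1, biases to 0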
        for m in self.sublayers():
            if isinstance(m, nn.Conv2D):
                n = m._kernel_size[0] * m._kernel_size[1] * m._out_channels
                normal_ = Normal(mean=0.0, std=math.sqrt(2. / n))
                normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2D):
                zeros_(m.bias)
                ones_(m.weight)

    def _upsample(self, x, scale=1):
        return F.upsample(x, scale_factor=scale, mode='bilinear')

    def forward(self, f, targets=None):
        out = self.conv1(f)
        out = self.relu1(self.bn1(out))
        out = self.conv2(out)

        if self.training:
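            # during training, upsample the maps 4x before returning them for loss computation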
            out = self._upsample(out, scale=4)
            return {'maps': out}
        else:
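            # at inference, also expose the sigmoid probability of the first output channel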
            score = F.sigmoid(out[:, 0, :, :])
            return {'maps': out, 'score': score}
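

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): runs the head on a
# dummy feature map to illustrate the expected input/output shapes. The
# channel sizes (in_channels=512, hidden_dim=128, num_classes=3) and the
# spatial size are illustrative assumptions, not values fixed by this module.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    head = CT_Head(in_channels=512, hidden_dim=128, num_classes=3)
    head.eval()  # inference branch: no upsampling, returns 'maps' and 'score'
    feat = paddle.rand([1, 512, 160, 160])  # dummy backbone/neck feature map
    with paddle.no_grad():
        preds = head(feat)
    print(preds['maps'].shape)   # [1, 3, 160, 160]
    print(preds['score'].shape)  # [1, 160, 160]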