# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import sys

import numpy as np
import paddle

# Make the sibling post-process helper modules importable regardless of CWD.
__dir__ = os.path.dirname(__file__)
sys.path.append(__dir__)
sys.path.append(os.path.join(__dir__, '..'))

from extract_textpoint_slow import *
from extract_textpoint_fast import generate_pivot_list_fast, restore_poly
  25. class PGNet_PostProcess(object):
  26. # two different post-process
  27. def __init__(self,
  28. character_dict_path,
  29. valid_set,
  30. score_thresh,
  31. outs_dict,
  32. shape_list,
  33. point_gather_mode=None):
  34. self.Lexicon_Table = get_dict(character_dict_path)
  35. self.valid_set = valid_set
  36. self.score_thresh = score_thresh
  37. self.outs_dict = outs_dict
  38. self.shape_list = shape_list
  39. self.point_gather_mode = point_gather_mode
  40. def pg_postprocess_fast(self):
  41. p_score = self.outs_dict['f_score']
  42. p_border = self.outs_dict['f_border']
  43. p_char = self.outs_dict['f_char']
  44. p_direction = self.outs_dict['f_direction']
  45. if isinstance(p_score, paddle.Tensor):
  46. p_score = p_score[0].numpy()
  47. p_border = p_border[0].numpy()
  48. p_direction = p_direction[0].numpy()
  49. p_char = p_char[0].numpy()
  50. else:
  51. p_score = p_score[0]
  52. p_border = p_border[0]
  53. p_direction = p_direction[0]
  54. p_char = p_char[0]
  55. src_h, src_w, ratio_h, ratio_w = self.shape_list[0]
  56. instance_yxs_list, seq_strs = generate_pivot_list_fast(
  57. p_score,
  58. p_char,
  59. p_direction,
  60. self.Lexicon_Table,
  61. score_thresh=self.score_thresh,
  62. point_gather_mode=self.point_gather_mode)
  63. poly_list, keep_str_list = restore_poly(instance_yxs_list, seq_strs,
  64. p_border, ratio_w, ratio_h,
  65. src_w, src_h, self.valid_set)
  66. data = {
  67. 'points': poly_list,
  68. 'texts': keep_str_list,
  69. }
  70. return data
  71. def pg_postprocess_slow(self):
  72. p_score = self.outs_dict['f_score']
  73. p_border = self.outs_dict['f_border']
  74. p_char = self.outs_dict['f_char']
  75. p_direction = self.outs_dict['f_direction']
  76. if isinstance(p_score, paddle.Tensor):
  77. p_score = p_score[0].numpy()
  78. p_border = p_border[0].numpy()
  79. p_direction = p_direction[0].numpy()
  80. p_char = p_char[0].numpy()
  81. else:
  82. p_score = p_score[0]
  83. p_border = p_border[0]
  84. p_direction = p_direction[0]
  85. p_char = p_char[0]
  86. src_h, src_w, ratio_h, ratio_w = self.shape_list[0]
  87. is_curved = self.valid_set == "totaltext"
  88. char_seq_idx_set, instance_yxs_list = generate_pivot_list_slow(
  89. p_score,
  90. p_char,
  91. p_direction,
  92. score_thresh=self.score_thresh,
  93. is_backbone=True,
  94. is_curved=is_curved)
  95. seq_strs = []
  96. for char_idx_set in char_seq_idx_set:
  97. pr_str = ''.join([self.Lexicon_Table[pos] for pos in char_idx_set])
  98. seq_strs.append(pr_str)
  99. poly_list = []
  100. keep_str_list = []
  101. all_point_list = []
  102. all_point_pair_list = []
  103. for yx_center_line, keep_str in zip(instance_yxs_list, seq_strs):
  104. if len(yx_center_line) == 1:
  105. yx_center_line.append(yx_center_line[-1])
  106. offset_expand = 1.0
  107. if self.valid_set == 'totaltext':
  108. offset_expand = 1.2
  109. point_pair_list = []
  110. for batch_id, y, x in yx_center_line:
  111. offset = p_border[:, y, x].reshape(2, 2)
  112. if offset_expand != 1.0:
  113. offset_length = np.linalg.norm(
  114. offset, axis=1, keepdims=True)
  115. expand_length = np.clip(
  116. offset_length * (offset_expand - 1),
  117. a_min=0.5,
  118. a_max=3.0)
  119. offset_detal = offset / offset_length * expand_length
  120. offset = offset + offset_detal
  121. ori_yx = np.array([y, x], dtype=np.float32)
  122. point_pair = (ori_yx + offset)[:, ::-1] * 4.0 / np.array(
  123. [ratio_w, ratio_h]).reshape(-1, 2)
  124. point_pair_list.append(point_pair)
  125. all_point_list.append([
  126. int(round(x * 4.0 / ratio_w)),
  127. int(round(y * 4.0 / ratio_h))
  128. ])
  129. all_point_pair_list.append(point_pair.round().astype(np.int32)
  130. .tolist())
  131. detected_poly, pair_length_info = point_pair2poly(point_pair_list)
  132. detected_poly = expand_poly_along_width(
  133. detected_poly, shrink_ratio_of_width=0.2)
  134. detected_poly[:, 0] = np.clip(
  135. detected_poly[:, 0], a_min=0, a_max=src_w)
  136. detected_poly[:, 1] = np.clip(
  137. detected_poly[:, 1], a_min=0, a_max=src_h)
  138. if len(keep_str) < 2:
  139. continue
  140. keep_str_list.append(keep_str)
  141. detected_poly = np.round(detected_poly).astype('int32')
  142. if self.valid_set == 'partvgg':
  143. middle_point = len(detected_poly) // 2
  144. detected_poly = detected_poly[
  145. [0, middle_point - 1, middle_point, -1], :]
  146. poly_list.append(detected_poly)
  147. elif self.valid_set == 'totaltext':
  148. poly_list.append(detected_poly)
  149. else:
  150. print('--> Not supported format.')
  151. exit(-1)
  152. data = {
  153. 'points': poly_list,
  154. 'texts': keep_str_list,
  155. }
  156. return data