__init__.py

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import copy

import paddle

__all__ = ['build_optimizer']


def build_lr_scheduler(lr_config, epochs, step_each_epoch):
    """Build a learning-rate scheduler from `lr_config`; the scheduler class is
    looked up by name in the local `learning_rate` module ('Const' by default).
    """
    from . import learning_rate
    lr_config.update({'epochs': epochs, 'step_each_epoch': step_each_epoch})
    lr_name = lr_config.pop('name', 'Const')
    lr = getattr(learning_rate, lr_name)(**lr_config)()
    return lr
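

# Illustrative sketch of how build_lr_scheduler is driven by config (the 'Cosine'
# name and its argument below are assumptions; the available scheduler names are
# whatever classes the local learning_rate module defines):
#
#     lr_config = {'name': 'Cosine', 'learning_rate': 0.001}
#     lr = build_lr_scheduler(lr_config, epochs=100, step_each_epoch=500)
#
# 'epochs' and 'step_each_epoch' are injected into lr_config before the named
# scheduler class is instantiated; without a 'name' key, 'Const' is used.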


def build_optimizer(config, epochs, step_each_epoch, model):
    """Build the optimizer (and its lr scheduler) described by `config`.

    Returns a tuple of (optimizer bound to the model's parameters, lr scheduler).
    """
    from . import regularizer, optimizer
    config = copy.deepcopy(config)
    # step 1: build the learning-rate scheduler
    lr = build_lr_scheduler(config.pop('lr'), epochs, step_each_epoch)

    # step 2: build the regularizer (weight decay)
    if 'regularizer' in config and config['regularizer'] is not None:
        reg_config = config.pop('regularizer')
        reg_name = reg_config.pop('name')
        if not hasattr(regularizer, reg_name):
            reg_name += 'Decay'
        reg = getattr(regularizer, reg_name)(**reg_config)()
    elif 'weight_decay' in config:
        reg = config.pop('weight_decay')
    else:
        reg = None

    # step 3: build gradient clipping and the optimizer itself
    optim_name = config.pop('name')
    if 'clip_norm' in config:
        clip_norm = config.pop('clip_norm')
        grad_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    elif 'clip_norm_global' in config:
        clip_norm = config.pop('clip_norm_global')
        grad_clip = paddle.nn.ClipGradByGlobalNorm(clip_norm=clip_norm)
    else:
        grad_clip = None
    optim = getattr(optimizer, optim_name)(learning_rate=lr,
                                           weight_decay=reg,
                                           grad_clip=grad_clip,
                                           **config)
    return optim(model), lr
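

# Illustrative end-to-end sketch (every key below is an assumption chosen to mirror
# the handling above; real configs are typically loaded from the training config
# file, and the names must match classes in the local optimizer / regularizer /
# learning_rate modules):
#
#     optim_config = {
#         'name': 'Adam',
#         'beta1': 0.9,
#         'beta2': 0.999,
#         'clip_norm': 10.0,
#         'lr': {'name': 'Cosine', 'learning_rate': 0.001},
#         'regularizer': {'name': 'L2', 'factor': 1e-4},
#     }
#     optim, lr_scheduler = build_optimizer(
#         optim_config, epochs=100, step_each_epoch=500, model=model)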