env.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import random

import numpy as np
import paddle
from paddle.distributed import fleet

__all__ = ['init_parallel_env', 'set_random_seed', 'init_fleet_env']


def init_fleet_env(find_unused_parameters=False):
    # Initialize the fleet collective training environment with a
    # distributed strategy.
    strategy = fleet.DistributedStrategy()
    strategy.find_unused_parameters = find_unused_parameters
    fleet.init(is_collective=True, strategy=strategy)


def init_parallel_env():
    env = os.environ
    dist = 'PADDLE_TRAINER_ID' in env and 'PADDLE_TRAINERS_NUM' in env
    if dist:
        # Give each trainer a distinct local seed so per-rank randomness
        # (e.g. data shuffling) differs across processes.
        trainer_id = int(env['PADDLE_TRAINER_ID'])
        local_seed = (99 + trainer_id)
        random.seed(local_seed)
        np.random.seed(local_seed)
        paddle.distributed.init_parallel_env()


def set_random_seed(seed):
    # Seed Paddle, Python, and NumPy RNGs for reproducibility.
    paddle.seed(seed)
    random.seed(seed)
    np.random.seed(seed)
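
A minimal usage sketch (not part of the original file): a training entry point could call these helpers roughly as below. The module path `env`, the `use_fleet` flag, and the `main` function are assumptions for illustration, not part of the source.

# Hypothetical usage sketch; module path and flag names are assumptions.
from env import init_fleet_env, init_parallel_env, set_random_seed


def main(use_fleet=False, seed=42):
    # Seed all RNGs first so model init and data shuffling are reproducible.
    set_random_seed(seed)
    if use_fleet:
        # Collective training via fleet; typically launched with
        # `python -m paddle.distributed.launch`.
        init_fleet_env(find_unused_parameters=True)
    else:
        # Plain multi-process data parallel; per-trainer seeds are set inside
        # when the PADDLE_TRAINER_* environment variables are present.
        init_parallel_env()


if __name__ == '__main__':
    main()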