unit_test.sh

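# Single-process tests (one GPU), run directly with python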
python python_single_gpu_unit_test.py
python single_gpu_unit_test.py
python test_batchnorm1d.py
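# Two-GPU tests: torch.distributed.launch spawns --nproc_per_node worker processes
# on this node and hands each its local rank; flags after the script name
# (--fp16, --apex) are forwarded to the test script itself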
python -m torch.distributed.launch --nproc_per_node=2 two_gpu_unit_test.py
python -m torch.distributed.launch --nproc_per_node=2 two_gpu_unit_test.py --fp16
python -m torch.distributed.launch --nproc_per_node=2 two_gpu_test_different_batch_size.py --apex
# Beware: you need a system with at least 4 GPUs to test group_size < world_size
#python -m torch.distributed.launch --nproc_per_node=4 test_groups.py --group_size=2
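
The launched test scripts themselves are not shown here; as a rough sketch (an
assumption, not the actual contents of these test scripts), a script started with
torch.distributed.launch typically initializes itself like this:

    import argparse
    import torch

    parser = argparse.ArgumentParser()
    # torch.distributed.launch passes a distinct --local_rank to each process it spawns
    parser.add_argument("--local_rank", type=int, default=0)
    args = parser.parse_args()

    # bind this process to its GPU, then join the process group via the env vars
    # (MASTER_ADDR, MASTER_PORT, RANK, WORLD_SIZE) that the launcher sets
    torch.cuda.set_device(args.local_rank)
    torch.distributed.init_process_group(backend="nccl", init_method="env://")

    # ... test body: build the model/tensors on this rank and compare results ...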