diff --git a/pytorch_lightning/plugins/training_type/ddp.py b/pytorch_lightning/plugins/training_type/ddp.py
index 220a18c4c9b295..a13428c51f09b5 100644
--- a/pytorch_lightning/plugins/training_type/ddp.py
+++ b/pytorch_lightning/plugins/training_type/ddp.py
@@ -326,7 +326,7 @@ def post_dispatch(self) -> None:
     def barrier(self, *args, **kwargs) -> None:
         if not torch_distrib.is_initialized():
             return
-        if _TORCH_GREATER_EQUAL_1_8 and self.torch_distributed_backend == "nccl":
+        if _TORCH_GREATER_EQUAL_1_8 and torch_distrib.get_backend() == "nccl":
             torch_distrib.barrier(device_ids=self.determine_ddp_device_ids())
         else:
             torch_distrib.barrier()
diff --git a/pytorch_lightning/plugins/training_type/ddp_spawn.py b/pytorch_lightning/plugins/training_type/ddp_spawn.py
index cd0ecb5f4039e4..323fa19dab82f9 100644
--- a/pytorch_lightning/plugins/training_type/ddp_spawn.py
+++ b/pytorch_lightning/plugins/training_type/ddp_spawn.py
@@ -312,8 +312,7 @@ def __recover_child_process_weights(self, best_path, last_path):
     def barrier(self, *args, **kwargs) -> None:
         if not torch_distrib.is_initialized():
             return
-        if _TORCH_GREATER_EQUAL_1_8 and self.torch_distributed_backend == "nccl":
-            assert torch_distrib.dist_backend == "nccl"
+        if _TORCH_GREATER_EQUAL_1_8 and torch_distrib.get_backend() == "nccl":
             torch_distrib.barrier(device_ids=self.determine_ddp_device_ids())
         else:
             torch_distrib.barrier()
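
For context, here is a minimal standalone sketch of the barrier pattern both hunks converge on. The function name `backend_aware_barrier` and the `packaging.version` guard are assumptions made for this illustration; in Lightning the version check is the `_TORCH_GREATER_EQUAL_1_8` constant and the method lives on the training-type plugin.

```python
import torch
import torch.distributed as torch_distrib
from packaging.version import Version

# Stand-in for Lightning's _TORCH_GREATER_EQUAL_1_8 flag (an assumption for
# this sketch; the real constant lives in pytorch_lightning.utilities).
_TORCH_GREATER_EQUAL_1_8 = Version(torch.__version__) >= Version("1.8.0")


def backend_aware_barrier(device_ids=None) -> None:
    """Block until every rank reaches this point."""
    if not torch_distrib.is_initialized():
        # No process group yet: nothing to synchronize.
        return
    if _TORCH_GREATER_EQUAL_1_8 and torch_distrib.get_backend() == "nccl":
        # torch.distributed.barrier() accepts `device_ids` from PyTorch 1.8;
        # NCCL needs the local GPU index so the collective runs on the
        # device this rank actually owns.
        torch_distrib.barrier(device_ids=device_ids)
    else:
        # Older PyTorch or a CPU backend (gloo/mpi): a plain barrier suffices.
        torch_distrib.barrier()
```

Querying `torch_distrib.get_backend()` rather than `self.torch_distributed_backend` checks the backend the initialized process group is actually using, not merely the one the plugin was configured to request, so the `device_ids` path is only taken when NCCL is really in play.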