distributed_util.py
import argparse

import torch
import torch.distributed as dist


def is_distributed(args: argparse.Namespace) -> bool:
    """Whether DDP is activated.

    Args:
        args (argparse.Namespace): The parsed command-line arguments.

    Returns:
        bool: Whether DDP is activated.
    """
    return args.local_rank != -1


def is_main_process(args: argparse.Namespace) -> bool:
    """Whether this process is the main process.

    Args:
        args (argparse.Namespace): The parsed command-line arguments.

    Returns:
        bool: Whether this process is the main process.
    """
    return not is_distributed(args) or args.local_rank == 0


def init_distributed(args: argparse.Namespace):
    """Initializes the DDP process group and sets the default CUDA device.

    Args:
        args (argparse.Namespace): The parsed command-line arguments.
    """
    if is_distributed(args):
        dist.init_process_group(backend="nccl")
        torch.cuda.set_device(args.local_rank)


def get_device(args: argparse.Namespace) -> torch.device:
    """Gets the default device.

    Args:
        args (argparse.Namespace): The parsed command-line arguments.

    Returns:
        torch.device: The default device.
    """
    if is_distributed(args):
        device = torch.device("cuda", args.local_rank)
    else:
        if torch.cuda.is_available():
            device = torch.device("cuda", 0)
        else:
            device = torch.device("cpu")
    return device


def deinit_distributed(args: argparse.Namespace):
    """Cleans up the DDP process group.

    Args:
        args (argparse.Namespace): The parsed command-line arguments.
    """
    if is_distributed(args):
        dist.destroy_process_group()
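

# A minimal usage sketch of these helpers, assuming the script is launched
# either directly (single process) or via `torchrun`/`torch.distributed.launch`,
# which passes `--local_rank` to each worker. The argument name, the toy model,
# and the flow below are illustrative assumptions, not part of this module.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--local_rank", type=int, default=-1,
        help="Set by the distributed launcher; -1 means no DDP",
    )
    args = parser.parse_args()

    # Set up the process group (no-op when not running under a launcher).
    init_distributed(args)
    device = get_device(args)

    # Build a toy model and wrap it with DDP only when running distributed.
    model = torch.nn.Linear(8, 2).to(device)
    if is_distributed(args):
        model = torch.nn.parallel.DistributedDataParallel(
            model, device_ids=[args.local_rank]
        )

    # Only the main process should write checkpoints or logs.
    if is_main_process(args):
        print(f"Running on {device}")

    # Tear down the process group before exiting.
    deinit_distributed(args)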