"""Helpers for rank bookkeeping under torch.distributed.

All functions fall back to single-process defaults when the distributed
package is unavailable or the process group is not yet initialized.
"""
import os

import torch.distributed as dist

def is_distributed():
    """Return True when the job is running with more than one process."""
    return get_world_size() > 1


def get_world_size():
    """Total number of processes in the job (1 when not distributed)."""
    if not dist.is_available():
        return 1
    return dist.get_world_size() if dist.is_initialized() else 1


def get_rank():
    """Global rank of this process (0 when not distributed)."""
    if not dist.is_available():
        return 0
    return dist.get_rank() if dist.is_initialized() else 0


def get_local_rank():
    """Rank of this process on its own node, read from the LOCAL_RANK
    environment variable set by launchers such as torchrun."""
    if not dist.is_available():
        return 0
    return int(os.getenv('LOCAL_RANK', 0)) if dist.is_initialized() else 0


def is_master():
    """True only on the global rank-0 process."""
    return get_rank() == 0


def is_local_master():
    """True only on the rank-0 process of each node."""
    return get_local_rank() == 0
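

# Usage sketch (illustrative, not part of the original module): these helpers
# are typically used to gate per-job side effects (logging, checkpointing) on
# is_master() and per-node side effects (dataset download to local disk) on
# is_local_master(). The smoke test below is safe to run as a plain script,
# and under `torchrun --nproc_per_node=N` each process reports its own ranks.
if __name__ == '__main__':
    print(f'world_size={get_world_size()} rank={get_rank()} '
          f'local_rank={get_local_rank()} master={is_master()}')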