This repository has been archived by the owner on Oct 13, 2022. It is now read-only.

Commit

Merge pull request #156 from danpovey/ddp_master_port
Make master port configurable
danpovey authored Apr 12, 2021
2 parents 45c5b4d + 2cdf8fb commit 6df1bff
Showing 2 changed files with 12 additions and 2 deletions.
7 changes: 6 additions & 1 deletion egs/librispeech/asr/simple_v1/mmi_att_transformer_train.py
@@ -286,6 +286,11 @@ def get_parser():
         type=int,
         default=1,
         help='Number of GPUs for DDP training.')
+    parser.add_argument(
+        '--master-port',
+        type=int,
+        default=12354,
+        help='Master port to use for DDP training.')
     parser.add_argument(
         '--model-type',
         type=str,
@@ -362,7 +367,7 @@ def run(rank, world_size, args):
     att_rate = args.att_rate
 
     fix_random_seed(42)
-    setup_dist(rank, world_size)
+    setup_dist(rank, world_size, args.master_port)
 
     exp_dir = Path('exp-' + model_type + '-noam-mmi-att-musan-sa')
     setup_logger(f'{exp_dir}/log/log-train-{rank}')
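The new flag rides the usual DDP bootstrap path: get_parser() produces args.master_port, and each spawned rank forwards it to setup_dist. A minimal sketch of that flow, assuming the standard torch.multiprocessing.spawn launcher and hypothetical stripped-down get_parser/run/main wrappers (the real training script does far more), might look like:

import argparse

import torch.multiprocessing as mp

from snowfall.dist import setup_dist


def get_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument('--world-size', type=int, default=1,
                        help='Number of GPUs for DDP training.')
    parser.add_argument('--master-port', type=int, default=12354,
                        help='Master port to use for DDP training.')
    return parser


def run(rank, world_size, args):
    # Every rank joins the same process group, rendezvousing on the
    # user-chosen (or default) master port.
    setup_dist(rank, world_size, args.master_port)
    # ... model construction and the training loop would go here ...


def main():
    args = get_parser().parse_args()
    # One process per GPU; each process receives its rank as the first argument.
    mp.spawn(run, args=(args.world_size, args), nprocs=args.world_size, join=True)


if __name__ == '__main__':
    main()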
7 changes: 6 additions & 1 deletion snowfall/dist.py
@@ -3,9 +3,14 @@
 from torch import distributed as dist
 
 
-def setup_dist(rank, world_size):
+def setup_dist(rank, world_size, master_port = None):
     os.environ['MASTER_ADDR'] = 'localhost'
+<<<<<<< HEAD
     os.environ['MASTER_PORT'] = '12355'
+=======
+    os.environ['MASTER_PORT'] = ('12354' if master_port is None
+                                 else str(master_port))
+>>>>>>> 8f51e68... Make master port command-line configurable
     dist.init_process_group("nccl", rank=rank, world_size=world_size)
     torch.cuda.set_device(rank)
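Between the conflict markers, the branch side is the behaviour the commit message describes: MASTER_PORT falls back to '12354' when no port is supplied and otherwise uses whatever integer came from --master-port. Read on its own (a sketch of the intended resolution, assuming the conflict is settled in favour of the branch version), setup_dist would be:

import os

import torch
from torch import distributed as dist


def setup_dist(rank, world_size, master_port=None):
    # Sketch of the configurable-port version described by this commit.
    # All ranks rendezvous on localhost; the port is configurable so that
    # several DDP jobs can share one machine without colliding.
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = ('12354' if master_port is None
                                 else str(master_port))
    dist.init_process_group("nccl", rank=rank, world_size=world_size)
    torch.cuda.set_device(rank)

A second training run on the same host could then pick a free port explicitly, e.g. python mmi_att_transformer_train.py --world-size 2 --master-port 12356.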
