This repository has been archived by the owner on Oct 13, 2022. It is now read-only.

Commit

Merge pull request #157 from k2-fsa/revert-156-ddp_master_port
Revert "Make master port configurable"
danpovey authored Apr 12, 2021
2 parents 6df1bff + fc5b445 commit ad68cd9
Showing 2 changed files with 2 additions and 12 deletions.
7 changes: 1 addition & 6 deletions egs/librispeech/asr/simple_v1/mmi_att_transformer_train.py
@@ -286,11 +286,6 @@ def get_parser():
         type=int,
         default=1,
         help='Number of GPUs for DDP training.')
-    parser.add_argument(
-        '--master-port',
-        type=int,
-        default=12354,
-        help='Master port to use for DDP training.')
     parser.add_argument(
         '--model-type',
         type=str,
@@ -367,7 +362,7 @@ def run(rank, world_size, args):
     att_rate = args.att_rate
 
     fix_random_seed(42)
-    setup_dist(rank, world_size, args.master_port)
+    setup_dist(rank, world_size)
 
     exp_dir = Path('exp-' + model_type + '-noam-mmi-att-musan-sa')
     setup_logger(f'{exp_dir}/log/log-train-{rank}')
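
After this revert the training script no longer accepts --master-port, and the per-rank entry point calls setup_dist(rank, world_size) with no port argument. Below is a minimal sketch of the usual one-process-per-GPU launch pattern implied by the run(rank, world_size, args) signature; only setup_dist comes from this diff, while the --world-size flag name, the run() body, and the spawn wiring are illustrative assumptions rather than snowfall's actual code.

# Hedged sketch: the one-process-per-GPU DDP launch pattern implied by
# "def run(rank, world_size, args)". Only setup_dist is taken from this diff;
# the flag name, run() body, and spawn wiring are illustrative assumptions.
import argparse

import torch.multiprocessing as mp
from torch import distributed as dist

from snowfall.dist import setup_dist


def run(rank: int, world_size: int, args: argparse.Namespace):
    # After this revert the master port is fixed inside setup_dist;
    # args.master_port no longer exists and is not passed.
    setup_dist(rank, world_size)
    # ... build the model, wrap it in DistributedDataParallel, train ...
    dist.destroy_process_group()


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--world-size', type=int, default=1,
                        help='Number of GPUs for DDP training.')
    args = parser.parse_args()
    # mp.spawn passes the rank as the first positional argument to run().
    mp.spawn(run, args=(args.world_size, args), nprocs=args.world_size, join=True)


if __name__ == '__main__':
    main()

Invoked as, e.g., python mmi_att_transformer_train.py --world-size 4, this spawns four ranks, each binding to whatever port setup_dist hard-codes.
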
7 changes: 1 addition & 6 deletions snowfall/dist.py
@@ -3,14 +3,9 @@
 from torch import distributed as dist
 
 
-def setup_dist(rank, world_size, master_port = None):
+def setup_dist(rank, world_size):
     os.environ['MASTER_ADDR'] = 'localhost'
-<<<<<<< HEAD
     os.environ['MASTER_PORT'] = '12355'
-=======
-    os.environ['MASTER_PORT'] = ('12354' if master_port is None
-                                 else str(master_port))
->>>>>>> 8f51e68... Make master port command-line configurable
     dist.init_process_group("nccl", rank=rank, world_size=world_size)
     torch.cuda.set_device(rank)
 
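
With the six deleted lines gone (including the stray <<<<<<< / ======= / >>>>>>> conflict markers left behind by the reverted change), setup_dist goes back to a hard-coded master port. A sketch of how snowfall/dist.py should read after this commit, assuming the two lines above the hunk are plain import statements:

# Sketch of snowfall/dist.py as it should read after this revert.
# Assumption: the two lines above the hunk (original lines 1-2) are
# "import os" and "import torch"; only the lines shown in the diff are certain.
import os

import torch
from torch import distributed as dist


def setup_dist(rank, world_size):
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '12355'
    dist.init_process_group("nccl", rank=rank, world_size=world_size)
    torch.cuda.set_device(rank)
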
