
Commit c037ad5

types
awaelchli committed Jul 25, 2022
1 parent a585729 commit c037ad5
Showing 4 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion src/pytorch_lightning/strategies/ddp.py
@@ -193,7 +193,7 @@ def _setup_model(self, model: Module) -> DistributedDataParallel:
         log.detail(f"setting up DDP model with device ids: {device_ids}, kwargs: {self._ddp_kwargs}")
         return DistributedDataParallel(module=model, device_ids=device_ids, **self._ddp_kwargs)
 
-    def setup_distributed(self):
+    def setup_distributed(self) -> None:
         log.detail(f"{self.__class__.__name__}: setting up distributed...")
         reset_seed()
         self.set_world_ranks()
2 changes: 1 addition & 1 deletion src/pytorch_lightning/strategies/ddp_spawn.py
@@ -163,7 +163,7 @@ def _setup_model(self, model: Module) -> DistributedDataParallel:
         """Wraps the model into a :class:`~torch.nn.parallel.distributed.DistributedDataParallel` module."""
         return DistributedDataParallel(module=model, device_ids=self.determine_ddp_device_ids(), **self._ddp_kwargs)
 
-    def setup_distributed(self):
+    def setup_distributed(self) -> None:
         log.detail(f"{self.__class__.__name__}: setting up distributed...")
         reset_seed()
         self.set_world_ranks()
2 changes: 1 addition & 1 deletion src/pytorch_lightning/strategies/deepspeed.py
@@ -344,7 +344,7 @@ def _load_config(self, config):
                 config = json.load(f)
         return config
 
-    def setup_distributed(self):
+    def setup_distributed(self) -> None:
         reset_seed()
         self.set_world_ranks()
         rank_zero_only.rank = self.global_rank
2 changes: 1 addition & 1 deletion src/pytorch_lightning/strategies/tpu_spawn.py
@@ -196,7 +196,7 @@ def reduce(self, output, group: Optional[Any] = None, reduce_op: Optional[Union[
 
         return output
 
-    def setup_distributed(self):
+    def setup_distributed(self) -> None:
         reset_seed()
         self.set_world_ranks()
         rank_zero_only.rank = self.global_rank
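
All four changes are the same mechanical edit: `setup_distributed` gains a `-> None` return annotation, marking it as a procedure with no meaningful return value. As a minimal sketch (not part of this commit), the snippet below shows what the annotation buys: a type checker such as mypy will flag any caller that tries to use the result.

# Minimal sketch, not from the commit: a standalone stand-in for the
# annotated strategy methods above.

def setup_distributed() -> None:
    """Configure distributed state; deliberately return nothing."""
    ...

result = setup_distributed()  # mypy: error: "setup_distributed" does not return a value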
