From 43635a9a9b20fa4367c0a5cc0979f04c4527c6b2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Wed, 29 Jun 2022 20:51:48 +0200
Subject: [PATCH] Remove remaining old-style AcceleratorConnector properties
 (#13412)

Co-authored-by: Jirka
---
 .../trainer/connectors/accelerator_connector.py | 15 +--------------
 src/pytorch_lightning/trainer/trainer.py        |  2 +-
 .../deprecated_api/test_remove_1-8.py           | 12 ++++++++----
 3 files changed, 10 insertions(+), 19 deletions(-)

diff --git a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
index ab7094c90b1bc..2ec0e30a5b739 100644
--- a/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/src/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -811,22 +811,9 @@ def _lazy_init_strategy(self) -> None:
             f" found {self.strategy.__class__.__name__}."
         )
 
-    """The following properties are here for backward-compatibility and will be deprecated and removed in favor
-    of accessing this information through the strategy/accelerator directly."""
-    # TODO: deprecate all properties below
-
-    @property
-    def tpu_cores(self) -> Optional[Union[List[int], int]]:
-        if isinstance(self.accelerator, TPUAccelerator):
-            return self._tpu_cores  # type: ignore
-        return 0
-
-    @property
-    def gpus(self) -> Optional[Union[List[int], str, int]]:
-        return self._gpus
-
     @property
     def is_distributed(self) -> bool:
+        # TODO: deprecate this property
         # Used for custom plugins.
         # Custom plugins should implement is_distributed property.
         if hasattr(self.strategy, "is_distributed") and not isinstance(self.accelerator, TPUAccelerator):
diff --git a/src/pytorch_lightning/trainer/trainer.py b/src/pytorch_lightning/trainer/trainer.py
index 46774395fd5e2..e823ff7e08eb0 100644
--- a/src/pytorch_lightning/trainer/trainer.py
+++ b/src/pytorch_lightning/trainer/trainer.py
@@ -2180,7 +2180,7 @@ def gpus(self) -> Optional[Union[List[int], str, int]]:
             "`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
             " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
         )
-        return self._accelerator_connector.gpus
+        return self._accelerator_connector._gpus
 
     @property
     def model(self) -> torch.nn.Module:
diff --git a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
index 85328372acaa0..de02cba564c0a 100644
--- a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
+++ b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
@@ -1131,8 +1131,10 @@ def test_trainer_gpus(monkeypatch, trainer_kwargs):
     monkeypatch.setattr(torch.cuda, "device_count", lambda: 4)
     trainer = Trainer(**trainer_kwargs)
     with pytest.deprecated_call(
-        match="`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
-        " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
+        match=(
+            "`Trainer.gpus` was deprecated in v1.6 and will be removed in v1.8."
+            " Please use `Trainer.num_devices` or `Trainer.device_ids` to get device information instead."
+        )
     ):
         assert trainer.gpus == trainer_kwargs["devices"]
 
@@ -1141,8 +1143,10 @@ def test_trainer_tpu_cores(monkeypatch):
     monkeypatch.setattr(pytorch_lightning.accelerators.tpu.TPUAccelerator, "is_available", lambda _: True)
     trainer = Trainer(accelerator="tpu", devices=8)
     with pytest.deprecated_call(
-        match="`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. "
-        "Please use `Trainer.num_devices` instead."
+        match=(
+            "`Trainer.tpu_cores` is deprecated in v1.6 and will be removed in v1.8. "
+            "Please use `Trainer.num_devices` instead."
+        )
     ):
         assert trainer.tpu_cores == 8