diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md
index 60afab1d0bcc9..e184893436a42 100644
--- a/src/pytorch_lightning/CHANGELOG.md
+++ b/src/pytorch_lightning/CHANGELOG.md
@@ -83,7 +83,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed deprecated `pytorch_lightning.profiler.base.BaseProfiler` in favor of `pytorch_lightning.profilers.profiler.Profiler` ([#15637](https://github.com/Lightning-AI/lightning/pull/15637))
 
--
+- Removed deprecated code in `pytorch_lightning.utilities.meta` ([#16038](https://github.com/Lightning-AI/lightning/pull/16038))
 
 
 ### Fixed
diff --git a/src/pytorch_lightning/utilities/meta.py b/src/pytorch_lightning/utilities/meta.py
index 6670dc7a63f6f..10ddf80bb831d 100644
--- a/src/pytorch_lightning/utilities/meta.py
+++ b/src/pytorch_lightning/utilities/meta.py
@@ -11,88 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from contextlib import contextmanager
-from typing import Any, Callable, Generator, Mapping, Optional, Set, Type, Union
+from typing import Mapping, Optional, Union
 
 from lightning_utilities.core.imports import module_available
 from torch import Tensor
 from torch.nn import Module, Parameter
 
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
-
-
-def is_meta_init() -> bool:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.is_meta_init` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-    return False
-
-
-def init_meta(module_fn: Callable[..., Module], *args: Any, **kwargs: Any) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.init_meta` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-
-
-def get_all_subclasses(cls: Type) -> Set[Type]:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.get_all_subclasses` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    from lightning_utilities.core.inheritance import get_all_subclasses as new_get_all_subclasses
-
-    return new_get_all_subclasses(cls)
-
-
-def recursively_setattr(root_module: Any, prefix: str, materialized_module: Module) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.recursively_setattr` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    *path, name = prefix.split(".")
-    for p in path:
-        root_module = getattr(root_module, p)
-
-    try:
-        index = int(name)
-        root_module[index] = materialized_module
-    except ValueError:
-        setattr(root_module, name, materialized_module)
-
-
-def materialize_module(root_module: Module) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.materialize_module` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-
-
-@contextmanager
-def init_meta_context() -> Generator:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.init_meta_context` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-    yield
-
-
-def is_on_meta_device(module: Module) -> bool:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.is_on_meta_device` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    try:
-        param = next(module.parameters())
-        return param.is_meta
-    except StopIteration:
-        return False
-
 
 def _is_deferred(module: Optional[Module]) -> bool:
     if module is None or not module_available("torchdistx.fake"):
diff --git a/tests/tests_pytorch/deprecated_api/test_remove_1-9.py b/tests/tests_pytorch/deprecated_api/test_remove_1-9.py
index baccbebb658bc..c9585c98aeb4c 100644
--- a/tests/tests_pytorch/deprecated_api/test_remove_1-9.py
+++ b/tests/tests_pytorch/deprecated_api/test_remove_1-9.py
@@ -223,17 +223,3 @@ def test_gpu_accelerator_deprecation_warning():
 def test_v1_9_0_deprecated_lightning_deepspeed_module():
     with pytest.deprecated_call(match=r"has been deprecated in v1.7.1 and will be removed in v1.9."):
         _ = LightningDeepSpeedModule(BoringModel(), 32)
-
-
-def test_meta_utility_deprecations():
-    import pytorch_lightning.utilities.meta as meta
-
-    pytest.deprecated_call(meta.is_meta_init, match="is_meta_init.*removed in v1.9")
-    pytest.deprecated_call(meta.init_meta, Mock(), match="init_meta.*removed in v1.9")
-    pytest.deprecated_call(meta.get_all_subclasses, Mock, match="get_all_subclasses.*removed in v1.9")
-    pytest.deprecated_call(meta.recursively_setattr, Mock(), "foo", 1, match="recursively_setattr.*removed in v1.9")
-    pytest.deprecated_call(meta.materialize_module, Mock(), match="materialize_module.*removed in v1.9")
-    with pytest.deprecated_call(match="init_meta_context.*removed in v1.9"):
-        with meta.init_meta_context():
-            pass
-    pytest.deprecated_call(meta.is_on_meta_device, LightningModule(), match="is_on_meta_device.*removed in v1.9")