Remove deprecated code in pl.utilities.meta (#16038)
carmocca authored Dec 13, 2022
1 parent ca75e49 commit 53bf714
Showing 3 changed files with 2 additions and 92 deletions.
2 changes: 1 addition & 1 deletion src/pytorch_lightning/CHANGELOG.md
@@ -83,7 +83,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed deprecated `pytorch_lightning.profiler.base.BaseProfiler` in favor of `pytorch_lightning.profilers.profiler.Profiler` ([#15637](https://github.com/Lightning-AI/lightning/pull/15637))
 
 
--
+- Removed deprecated code in `pytorch_lightning.utilities.meta` ([#16038](https://github.com/Lightning-AI/lightning/pull/16038))
 
 
 ### Fixed
78 changes: 1 addition & 77 deletions src/pytorch_lightning/utilities/meta.py
@@ -11,88 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from contextlib import contextmanager
-from typing import Any, Callable, Generator, Mapping, Optional, Set, Type, Union
+from typing import Mapping, Optional, Union
 
 from lightning_utilities.core.imports import module_available
 from torch import Tensor
 from torch.nn import Module, Parameter
 
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
-
-
-def is_meta_init() -> bool:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.is_meta_init` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-    return False
-
-
-def init_meta(module_fn: Callable[..., Module], *args: Any, **kwargs: Any) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.init_meta` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-
-
-def get_all_subclasses(cls: Type) -> Set[Type]:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.get_all_subclasses` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    from lightning_utilities.core.inheritance import get_all_subclasses as new_get_all_subclasses
-
-    return new_get_all_subclasses(cls)
-
-
-def recursively_setattr(root_module: Any, prefix: str, materialized_module: Module) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.recursively_setattr` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    *path, name = prefix.split(".")
-    for p in path:
-        root_module = getattr(root_module, p)
-
-    try:
-        index = int(name)
-        root_module[index] = materialized_module
-    except ValueError:
-        setattr(root_module, name, materialized_module)
-
-
-def materialize_module(root_module: Module) -> None:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.materialize_module` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-
-
-@contextmanager
-def init_meta_context() -> Generator:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.init_meta_context` is deprecated in v1.8 and will be removed in v1.9."
-        " The function has become a no-op."
-        " Please check out the `torchdistx` project instead: https://github.com/pytorch/torchdistx"
-    )
-    yield
-
-
-def is_on_meta_device(module: Module) -> bool:
-    rank_zero_deprecation(
-        "`pytorch_lightning.utilities.meta.is_on_meta_device` is deprecated in v1.8 and will be removed in v1.9."
-        " Please copy its implementation if you have a use for it."
-    )
-    try:
-        param = next(module.parameters())
-        return param.is_meta
-    except StopIteration:
-        return False
-
 
 def _is_deferred(module: Optional[Module]) -> bool:
     if module is None or not module_available("torchdistx.fake"):
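
For readers who relied on the removed helpers: their behaviour maps onto stock PyTorch meta-device support (and, for deferred materialization, the `torchdistx` project that the deprecation messages point to). Below is a minimal sketch of the equivalent checks, assuming a recent PyTorch with `device="meta"` factory kwargs and `Module.to_empty`; the `is_on_meta_device` function here is a hypothetical stand-in mirroring the removed implementation, not part of this commit.

```python
from torch import nn

# Create a module on the meta device: parameters carry shape/dtype metadata but
# allocate no storage (roughly what the removed `init_meta_context` enabled).
layer = nn.Linear(16, 4, device="meta")


def is_on_meta_device(module: nn.Module) -> bool:
    """Hypothetical stand-in for the removed helper of the same name."""
    try:
        return next(module.parameters()).is_meta
    except StopIteration:
        return False


assert is_on_meta_device(layer)

# Materialize by allocating real, uninitialized storage on a concrete device,
# then re-run the module's own parameter initialization.
layer = layer.to_empty(device="cpu")
layer.reset_parameters()
assert not is_on_meta_device(layer)
```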
14 changes: 0 additions & 14 deletions tests/tests_pytorch/deprecated_api/test_remove_1-9.py
@@ -223,17 +223,3 @@ def test_gpu_accelerator_deprecation_warning():
 def test_v1_9_0_deprecated_lightning_deepspeed_module():
     with pytest.deprecated_call(match=r"has been deprecated in v1.7.1 and will be removed in v1.9."):
         _ = LightningDeepSpeedModule(BoringModel(), 32)
-
-
-def test_meta_utility_deprecations():
-    import pytorch_lightning.utilities.meta as meta
-
-    pytest.deprecated_call(meta.is_meta_init, match="is_meta_init.*removed in v1.9")
-    pytest.deprecated_call(meta.init_meta, Mock(), match="init_meta.*removed in v1.9")
-    pytest.deprecated_call(meta.get_all_subclasses, Mock, match="get_all_subclasses.*removed in v1.9")
-    pytest.deprecated_call(meta.recursively_setattr, Mock(), "foo", 1, match="recursively_setattr.*removed in v1.9")
-    pytest.deprecated_call(meta.materialize_module, Mock(), match="materialize_module.*removed in v1.9")
-    with pytest.deprecated_call(match="init_meta_context.*removed in v1.9"):
-        with meta.init_meta_context():
-            pass
-    pytest.deprecated_call(meta.is_on_meta_device, LightningModule(), match="is_on_meta_device.*removed in v1.9")
