Bump torch from 2.0.1 to 2.1.0 in /requirements (#18752)
Co-authored-by: Jirka <[email protected]>
Co-authored-by: Carlos Mocholí <[email protected]>
3 people authored Oct 24, 2023
1 parent c5a731c commit 73f5df0
Showing 18 changed files with 81 additions and 65 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/code-checks.yml
@@ -49,4 +49,4 @@ jobs:
  pip list
  - name: Check typing
- run: mypy --no-warn-unused-ignores
+ run: mypy
13 changes: 3 additions & 10 deletions pyproject.toml
@@ -127,17 +127,10 @@ files = [
  ]
  # This section is for folders with "-" as they are not valid python modules
  exclude = [
- "src/lightning_app/__about__.py",
- "src/lightning_app/__setup__.py",
- "src/lightning_app/__version__.py",
- "src/lightning_fabric/__about__.py",
- "src/lightning_fabric/__setup__.py",
- "src/lightning_fabric/__version__.py",
  "src/lightning/app/cli/app-template",
  "src/lightning/app/cli/component-template",
  "src/lightning/app/cli/pl-app-template",
  "src/lightning/app/cli/react-ui-template",
- "src/lightning/app/launcher",
  ]
  install_types = "True"
  non_interactive = "True"
@@ -192,6 +185,9 @@ module = [
"lightning.app.frontend.stream_lit",
"lightning.app.frontend.utils",
"lightning.app.frontend.web",
"lightning.app.launcher.launcher",
"lightning.app.launcher.lightning_backend",
"lightning.app.launcher.lightning_hybrid_backend",
"lightning.app.pdb.pdb",
"lightning.app.runners.backends.backend",
"lightning.app.runners.backends.cloud",
@@ -240,9 +236,6 @@ module = [
"lightning.app.utilities.state",
"lightning.app.utilities.tracer",
"lightning.app.utilities.tree",
"lightning.store.authentication",
"lightning.store.cloud_api",
"lightning.store.save",
"lightning.store.utils",
]
ignore_errors = "True"
2 changes: 1 addition & 1 deletion requirements/data/data.txt
@@ -5,4 +5,4 @@ lightning-utilities >=0.8.0, <0.10.0
  # to be able to include also 0.6 and preserve `>` needed for CI min version bypass
  torchdata >0.5.9, <=0.7.0
  # to be able to include also PL 2.0 and preserve `>` needed for CI min version bypass
- torch >0.14.0, <=2.1.0
+ torch >0.14.0, <2.2.0
2 changes: 1 addition & 1 deletion requirements/fabric/examples.txt
@@ -1,6 +1,6 @@
  # NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
  # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment

- torchvision >=0.13.0, <0.16.0
+ torchvision >=0.13.0, <0.17.0
  torchmetrics >=0.10.0, <1.3.0
  lightning-utilities >=0.8.0, <0.10.0
2 changes: 1 addition & 1 deletion requirements/pytorch/examples.txt
@@ -1,7 +1,7 @@
  # NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
  # in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment

- torchvision >=0.13.0, <0.16.0
+ torchvision >=0.13.0, <0.17.0
  gym[classic_control] >=0.17.0, <0.27.0
  ipython[all] <8.15.0
  torchmetrics >=0.10.0, <1.3.0
2 changes: 1 addition & 1 deletion requirements/typing.txt
@@ -1,5 +1,5 @@
  mypy==1.5.1
- torch==2.0.1
+ torch==2.1.0

  types-Markdown
  types-PyYAML
22 changes: 11 additions & 11 deletions src/lightning/app/core/api.py
@@ -196,9 +196,9 @@ class StateUpdate(BaseModel):
@fastapi_service.get("/api/v1/state", response_class=JSONResponse)
async def get_state(
response: Response,
x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
x_lightning_type: Optional[str] = Header(None),
x_lightning_session_uuid: Optional[str] = Header(None),
x_lightning_session_id: Optional[str] = Header(None),
) -> Mapping:
if x_lightning_session_uuid is None:
raise Exception("Missing X-Lightning-Session-UUID header")
@@ -246,8 +246,8 @@ async def get_layout() -> str:
@fastapi_service.get("/api/v1/spec", response_class=JSONResponse)
async def get_spec(
response: Response,
x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
x_lightning_session_uuid: Optional[str] = Header(None),
x_lightning_session_id: Optional[str] = Header(None),
) -> Union[List, Dict]:
if x_lightning_session_uuid is None:
raise Exception("Missing X-Lightning-Session-UUID header")
@@ -266,9 +266,9 @@ async def get_spec(
  async def post_delta(
  request: Request,
  response: Response,
- x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
- x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
- x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
+ x_lightning_type: Optional[str] = Header(None),
+ x_lightning_session_uuid: Optional[str] = Header(None),
+ x_lightning_session_id: Optional[str] = Header(None),
  ) -> Optional[Dict]:
  """This endpoint is used to make an update to the app state using delta diff, mainly used by streamlit to update
  the state."""
@@ -292,9 +292,9 @@ async def post_delta(
  async def post_state(
  request: Request,
  response: Response,
- x_lightning_type: Optional[str] = Header(None), # type: ignore[assignment]
- x_lightning_session_uuid: Optional[str] = Header(None), # type: ignore[assignment]
- x_lightning_session_id: Optional[str] = Header(None), # type: ignore[assignment]
+ x_lightning_type: Optional[str] = Header(None),
+ x_lightning_session_uuid: Optional[str] = Header(None),
+ x_lightning_session_id: Optional[str] = Header(None),
  ) -> Optional[Dict]:
  if x_lightning_session_uuid is None:
  raise Exception("Missing X-Lightning-Session-UUID header")
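The deleted lines above drop `# type: ignore[assignment]` comments that are no longer needed on FastAPI `Header(None)` defaults. For reference, a minimal standalone sketch of the same optional-header pattern (hypothetical `/ping` endpoint, not taken from this file):

    from typing import Optional

    from fastapi import FastAPI, Header

    app = FastAPI()

    @app.get("/ping")
    async def ping(x_lightning_session_uuid: Optional[str] = Header(None)) -> dict:
        # FastAPI maps the X-Lightning-Session-UUID request header onto this parameter;
        # it stays None when the client does not send the header.
        if x_lightning_session_uuid is None:
            raise Exception("Missing X-Lightning-Session-UUID header")
        return {"session": x_lightning_session_uuid}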
4 changes: 2 additions & 2 deletions src/lightning/fabric/plugins/collectives/torch_collective.py
@@ -84,10 +84,10 @@ def all_to_all(self, output_tensor_list: List[Tensor], input_tensor_list: List[T
  return output_tensor_list

  def send(self, tensor: Tensor, dst: int, tag: int = 0) -> None:
- dist.send(tensor, dst, tag=tag, group=self.group) # type: ignore[arg-type]
+ dist.send(tensor, dst, tag=tag, group=self.group)

  def recv(self, tensor: Tensor, src: Optional[int] = None, tag: int = 0) -> Tensor:
- dist.recv(tensor, src, tag=tag, group=self.group) # type: ignore[arg-type]
+ dist.recv(tensor, src, tag=tag, group=self.group)
  return tensor

  def all_gather_object(self, object_list: List[Any], obj: Any) -> List[Any]:
2 changes: 1 addition & 1 deletion src/lightning/fabric/plugins/precision/fsdp.py
@@ -143,7 +143,7 @@ def unscale_gradients(self, optimizer: Optimizer) -> None:
  if scaler is not None:
  if _optimizer_handles_unscaling(optimizer):
  raise NotImplementedError("Gradient clipping is not implemented for optimizers handling the unscaling.")
- scaler.unscale_(optimizer) # type: ignore[arg-type] # ShardedGradScaler has wrong type annotation
+ scaler.unscale_(optimizer)

  def state_dict(self) -> Dict[str, Any]:
  if self.scaler is not None:
2 changes: 1 addition & 1 deletion src/lightning/fabric/strategies/xla_fsdp.py
@@ -277,7 +277,7 @@ def clip_gradients_norm(
  ) -> Tensor:
  """Clip gradients by norm."""
  self.precision.unscale_gradients(optimizer)
- return module.clip_grad_norm_(max_norm=max_norm, norm_type=norm_type) # type: ignore[operator]
+ return module.clip_grad_norm_(max_norm=max_norm, norm_type=norm_type)

  def clip_gradients_value(self, module: Module, optimizer: Optimizer, clip_val: Union[float, int]) -> None:
  """Clip gradients by value."""
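`clip_grad_norm_` above is a method of the XLA-FSDP-wrapped module. Purely for illustration, the analogous eager-PyTorch call is sketched below with a plain `nn.Linear` (not the XLA code path):

    import torch
    from torch import nn

    model = nn.Linear(4, 2)
    model(torch.randn(8, 4)).sum().backward()

    # clip_grad_norm_ returns the total gradient norm as a Tensor, which is why the
    # strategy method above can return the result directly.
    total_norm = nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0, norm_type=2.0)
    print(total_norm)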
27 changes: 23 additions & 4 deletions src/lightning/fabric/utilities/types.py
@@ -12,12 +12,24 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  from pathlib import Path
- from typing import Any, Callable, Dict, Iterator, List, Optional, Protocol, TypeVar, Union, runtime_checkable
+ from typing import (
+ Any,
+ Callable,
+ DefaultDict,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Protocol,
+ TypeVar,
+ Union,
+ runtime_checkable,
+ )

  import torch
  from torch import Tensor
  from torch.optim import Optimizer
- from typing_extensions import TypeAlias
+ from typing_extensions import TypeAlias, overload

  from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_1_13, _TORCH_GREATER_EQUAL_2_0

@@ -117,7 +129,14 @@ def step(self, metrics: Union[float, int, Tensor], epoch: Optional[int] = None)
  class Steppable(Protocol):
  """To structurally type ``optimizer.step()``"""

+ # Inferred from `torch.optim.optimizer.pyi`
+ @overload
+ def step(self, closure: None = ...) -> None:
+ ...

+ @overload
+ def step(self, closure: Callable[[], float]) -> float:
+ ...

  def step(self, closure: Optional[Callable[[], float]] = ...) -> Optional[float]:
  ...

@@ -128,7 +147,7 @@ class Optimizable(Steppable, Protocol):

  param_groups: List[Dict[Any, Any]]
  defaults: Dict[Any, Any]
- state: Dict[Any, Any]
+ state: DefaultDict[Tensor, Any]

  def state_dict(self) -> Dict[str, Dict[Any, Any]]:
  ...
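The `@overload` pair added to `Steppable` mirrors `torch.optim.optimizer.pyi`, so the protocol describes both call forms of `optimizer.step()`. A minimal self-contained sketch of that pattern with a toy optimizer (illustrative names, not imported from Lightning):

    from typing import Callable, Optional, Protocol, overload, runtime_checkable

    @runtime_checkable
    class Steppable(Protocol):
        """Structural type for ``optimizer.step()``."""

        @overload
        def step(self, closure: None = ...) -> None:
            ...

        @overload
        def step(self, closure: Callable[[], float]) -> float:
            ...

        def step(self, closure: Optional[Callable[[], float]] = ...) -> Optional[float]:
            ...

    class ToyOptimizer:
        def step(self, closure: Optional[Callable[[], float]] = None) -> Optional[float]:
            # Returning the closure's value matches the second overload.
            return closure() if closure is not None else None

    def run_step(opt: Steppable) -> Optional[float]:
        # ToyOptimizer satisfies the protocol structurally, so mypy accepts it here.
        return opt.step(lambda: 0.5)

    print(run_step(ToyOptimizer()))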
18 changes: 7 additions & 11 deletions src/lightning/pytorch/_graveyard/_torchmetrics.py
@@ -1,9 +1,9 @@
- import contextlib
  from typing import Callable

  import torchmetrics
  from lightning_utilities.core.imports import compare_version as _compare_version

+ from lightning.pytorch.utilities.imports import _TORCHMETRICS_GREATER_EQUAL_0_8_0
  from lightning.pytorch.utilities.migration.utils import _patch_pl_to_mirror_if_necessary
@@ -12,13 +12,9 @@ def compare_version(package: str, op: Callable, version: str, use_base_version:
  return _compare_version(new_package, op, version, use_base_version)


- # patching is necessary, since up to v.0.7.3 torchmetrics has a hardcoded reference to lightning.pytorch,
- # which has to be redirected to the unified package:
- # https://github.com/Lightning-AI/metrics/blob/v0.7.3/torchmetrics/metric.py#L96
- with contextlib.suppress(AttributeError):
- if hasattr(torchmetrics.utilities.imports, "_compare_version"):
- torchmetrics.utilities.imports._compare_version = compare_version
-
- with contextlib.suppress(AttributeError):
- if hasattr(torchmetrics.metric, "_compare_version"):
- torchmetrics.metric._compare_version = compare_version
+ if not _TORCHMETRICS_GREATER_EQUAL_0_8_0:
+ # up to v0.8.0 torchmetrics had a hardcoded reference to lightning.pytorch which has to be redirected to the
+ # unified package. this was removed in
+ # https://github.com/Lightning-AI/torchmetrics/commit/b225889b34b83272117b758cbc28772a5c2356d9
+ torchmetrics.utilities.imports._compare_version = compare_version
+ torchmetrics.metric._compare_version = compare_version
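The rewrite above replaces the two `contextlib.suppress` blocks with a single patch gated on the installed torchmetrics version. A rough sketch of the wrapper being installed, using only `lightning_utilities` (the package-name redirect below is illustrative; the real module delegates to `_patch_pl_to_mirror_if_necessary`):

    from operator import ge
    from typing import Callable

    from lightning_utilities.core.imports import compare_version as _compare_version

    def compare_version(package: str, op: Callable, version: str, use_base_version: bool = False) -> bool:
        # Map a legacy package name onto the unified one before delegating to the real check.
        new_package = "lightning.pytorch" if package == "pytorch_lightning" else package
        return _compare_version(new_package, op, version, use_base_version)

    print(compare_version("torch", ge, "1.13.0"))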
2 changes: 1 addition & 1 deletion src/lightning/pytorch/callbacks/finetuning.py
@@ -127,7 +127,7 @@ def flatten_modules(modules: Union[Module, Iterable[Union[Module, Iterable]]]) -

  if isinstance(modules, Iterable):
  _flatten_modules = []
- for m in modules:
+ for m in modules: # type: ignore[union-attr]
  _flatten_modules.extend(BaseFinetuning.flatten_modules(m))

  _modules = iter(_flatten_modules)
14 changes: 11 additions & 3 deletions src/lightning/pytorch/core/optimizer.py
@@ -13,7 +13,7 @@
  # limitations under the License.
  from contextlib import contextmanager
  from dataclasses import fields
- from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
+ from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union, overload
  from weakref import proxy

  import torch
@@ -393,9 +393,17 @@ def load_state_dict(self, state_dict: Dict[Any, Any]) -> None:
  def state_dict(self) -> Dict[str, Any]:
  return {} # Return Empty

- def step(self, closure: Optional[Callable] = None) -> None:
+ @overload
+ def step(self, closure: None = ...) -> None:
+ ...

+ @overload
+ def step(self, closure: Callable[[], float]) -> float:
+ ...

+ def step(self, closure: Optional[Callable[[], float]] = None) -> Optional[float]:
  if closure is not None:
- closure()
+ return closure()

  def zero_grad(self, set_to_none: Optional[bool] = True) -> None:
  pass # Do Nothing
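`_MockOptimizer.step` now forwards the closure's return value, matching the overloads above and the behavior of real optimizers. A minimal sketch of why callers rely on that (plain SGD, not Lightning code):

    import torch
    from torch import nn

    model = nn.Linear(2, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    batch, target = torch.randn(4, 2), torch.randn(4, 1)

    def closure() -> float:
        optimizer.zero_grad()
        loss = nn.functional.mse_loss(model(batch), target)
        loss.backward()
        return loss.item()

    # Optimizers that accept a closure return its value from step(), so the loss
    # computed inside the closure can be logged by the caller.
    loss_value = optimizer.step(closure)
    print(loss_value)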
4 changes: 2 additions & 2 deletions src/lightning/pytorch/demos/transformer.py
@@ -75,10 +75,10 @@ def __init__(self, dim: int, dropout: float = 0.1, max_len: int = 5000) -> None:
  self.register_parameter("pe", nn.Parameter(pe, requires_grad=False))

  def reset_parameters(self) -> None:
- self.pe.copy_(self._init_pos_encoding()) # type: ignore[operator]
+ self.pe.copy_(self._init_pos_encoding())

  def forward(self, x: Tensor) -> Tensor:
- x + self.pe[: x.size(0), :] # type: ignore[index]
+ x + self.pe[: x.size(0), :]
  return self.dropout(x)

  def _init_pos_encoding(self) -> Tensor:
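In this demo `pe` is registered as a frozen `nn.Parameter` and sliced to the sequence length in `forward`. A toy sketch of that pattern (small sizes and a hypothetical `ToyPositionalEncoding`, not the demo module itself):

    import torch
    from torch import nn

    class ToyPositionalEncoding(nn.Module):
        def __init__(self, dim: int = 16, max_len: int = 100) -> None:
            super().__init__()
            pe = torch.zeros(max_len, 1, dim)
            # requires_grad=False keeps `pe` in the state_dict without making it trainable.
            self.register_parameter("pe", nn.Parameter(pe, requires_grad=False))

        def forward(self, x: torch.Tensor) -> torch.Tensor:
            # Slice the table to the length of the incoming sequence.
            return x + self.pe[: x.size(0), :]

    out = ToyPositionalEncoding()(torch.randn(10, 1, 16))
    print(out.shape)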
4 changes: 1 addition & 3 deletions src/lightning/pytorch/overrides/distributed.py
@@ -163,9 +163,7 @@ def _register_ddp_comm_hook(
  def _sync_module_states(module: torch.nn.Module) -> None:
  """Taken from https://github.com/pytorch/pytorch/blob/v2.0.0/torch/nn/parallel/distributed.py#L675-L682."""
  parameters_to_ignore = (
- set(module._ddp_params_and_buffers_to_ignore) # type: ignore[arg-type]
- if hasattr(module, "_ddp_params_and_buffers_to_ignore")
- else set()
+ set(module._ddp_params_and_buffers_to_ignore) if hasattr(module, "_ddp_params_and_buffers_to_ignore") else set()
  )
  from torch.distributed.distributed_c10d import _get_default_group
  from torch.distributed.utils import _sync_module_states as torch_sync_module_states
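The collapsed conditional builds the DDP ignore set from an optional attribute on the module. A minimal sketch of that `hasattr` lookup (the attribute is set by hand here purely for illustration):

    from torch import nn

    module = nn.Linear(3, 3)
    # DDP consults this optional attribute to skip parameters/buffers during syncing.
    module._ddp_params_and_buffers_to_ignore = ["bias"]

    parameters_to_ignore = (
        set(module._ddp_params_and_buffers_to_ignore) if hasattr(module, "_ddp_params_and_buffers_to_ignore") else set()
    )
    print(parameters_to_ignore)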
23 changes: 12 additions & 11 deletions src/lightning/pytorch/utilities/compile.py
@@ -88,8 +88,8 @@ def to_uncompiled(model: Union["pl.LightningModule", "torch._dynamo.OptimizedMod
  from torch._dynamo import OptimizedModule

  if isinstance(model, OptimizedModule):
- model = model._orig_mod
- if not isinstance(model, pl.LightningModule):
+ original = model._orig_mod
+ if not isinstance(original, pl.LightningModule):
  raise TypeError(
  f"Unexpected error, the wrapped model should be a LightningModule, found {type(model).__name__}"
  )
@@ -99,20 +99,21 @@ def to_uncompiled(model: Union["pl.LightningModule", "torch._dynamo.OptimizedMod
  raise ValueError(
  "`model` is required to be a compiled LightningModule. Found a non-compiled LightningModule instead."
  )
+ original = model

  else:
  raise ValueError("`model` must either be an instance of OptimizedModule or LightningModule")

- ctx = model._compiler_ctx
+ ctx = original._compiler_ctx
  if ctx is not None:
- model.forward = ctx["original_forward"] # type: ignore[method-assign]
- model.training_step = ctx["original_training_step"] # type: ignore[method-assign]
- model.validation_step = ctx["original_validation_step"] # type: ignore[method-assign]
- model.test_step = ctx["original_test_step"] # type: ignore[method-assign]
- model.predict_step = ctx["original_predict_step"] # type: ignore[method-assign]
- model._compiler_ctx = None
-
- return model
+ original.forward = ctx["original_forward"] # type: ignore[method-assign]
+ original.training_step = ctx["original_training_step"] # type: ignore[method-assign]
+ original.validation_step = ctx["original_validation_step"] # type: ignore[method-assign]
+ original.test_step = ctx["original_test_step"] # type: ignore[method-assign]
+ original.predict_step = ctx["original_predict_step"] # type: ignore[method-assign]
+ original._compiler_ctx = None
+
+ return original


  def _maybe_unwrap_optimized(model: object) -> "pl.LightningModule":
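The fix above restores the saved methods on the wrapped module (`_orig_mod`) instead of on the `OptimizedModule` wrapper. A minimal sketch of that wrapper/original relationship (plain `nn.Linear` instead of a `LightningModule`, assuming torch 2.x):

    import torch
    from torch import nn

    original = nn.Linear(4, 4)
    compiled = torch.compile(original)

    # torch.compile wraps the module and keeps the original around as `_orig_mod`,
    # so restored attributes must be assigned to the original object.
    print(type(compiled).__name__)         # OptimizedModule
    print(compiled._orig_mod is original)  # True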
1 change: 1 addition & 0 deletions src/lightning/pytorch/utilities/imports.py
@@ -19,6 +19,7 @@
  from lightning_utilities.core.rank_zero import rank_zero_warn

  _PYTHON_GREATER_EQUAL_3_11_0 = (sys.version_info.major, sys.version_info.minor) >= (3, 11)
+ _TORCHMETRICS_GREATER_EQUAL_0_8_0 = RequirementCache("torchmetrics>=0.8.0")
  _TORCHMETRICS_GREATER_EQUAL_0_9_1 = RequirementCache("torchmetrics>=0.9.1")
  _TORCHMETRICS_GREATER_EQUAL_0_11 = RequirementCache("torchmetrics>=0.11.0") # using new API with task
  _TORCHMETRICS_GREATER_EQUAL_1_0_0 = RequirementCache("torchmetrics>=1.0.0")
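The new `_TORCHMETRICS_GREATER_EQUAL_0_8_0` flag follows the existing `RequirementCache` pattern in this file. A minimal sketch of how such a flag is evaluated (standalone, needs only `lightning_utilities`):

    from lightning_utilities.core.imports import RequirementCache

    _TORCHMETRICS_GREATER_EQUAL_0_8_0 = RequirementCache("torchmetrics>=0.8.0")

    # The cache is truthy when the installed distribution satisfies the requirement,
    # so it can be used directly in `if` checks.
    if _TORCHMETRICS_GREATER_EQUAL_0_8_0:
        print("torchmetrics >= 0.8.0 is available")
    else:
        print(str(_TORCHMETRICS_GREATER_EQUAL_0_8_0))  # message explains what is missing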
