Skip to content

Commit

Permalink
Make tbptt imports Python 3.10 compatible (#13973)
Browse files Browse the repository at this point in the history
* Make tbptt imports Python 3.10 compatible

* add chlog
  • Loading branch information
awaelchli authored Aug 2, 2022
1 parent 2919dcf commit 0fbfbf9
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 4 deletions.
3 changes: 3 additions & 0 deletions src/pytorch_lightning/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -405,6 +405,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed default `amp_level` for `DeepSpeedPrecisionPlugin` to `O2` ([#13897](https://github.com/PyTorchLightning/pytorch-lightning/pull/13897))


- Fixed Python 3.10 compatibility for truncated back-propagation through time (TBPTT) ([#13973](https://github.com/Lightning-AI/lightning/pull/13973))



## [1.6.5] - 2022-07-13

Expand Down
8 changes: 4 additions & 4 deletions src/pytorch_lightning/core/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.
"""The LightningModule - an nn.Module with many additional features."""

import collections
import collections.abc
import inspect
import logging
import numbers
Expand Down Expand Up @@ -1712,7 +1712,7 @@ def tbptt_split_batch(self, batch, split_size):
for i, x in enumerate(batch):
if isinstance(x, torch.Tensor):
split_x = x[:, t:t + split_size]
elif isinstance(x, collections.Sequence):
elif isinstance(x, collections.abc.Sequence):
split_x = [None] * len(x)
for batch_idx in range(len(x)):
split_x[batch_idx] = x[batch_idx][t:t + split_size]
Expand All @@ -1726,7 +1726,7 @@ def tbptt_split_batch(self, batch, split_size):
if :paramref:`~pytorch_lightning.core.module.LightningModule.truncated_bptt_steps` > 0.
Each returned batch split is passed separately to :meth:`training_step`.
"""
time_dims = [len(x[0]) for x in batch if isinstance(x, (Tensor, collections.Sequence))]
time_dims = [len(x[0]) for x in batch if isinstance(x, (Tensor, collections.abc.Sequence))]
assert len(time_dims) >= 1, "Unable to determine batch time dimension"
assert all(x == time_dims[0] for x in time_dims), "Batch time dimension length is ambiguous"

Expand All @@ -1736,7 +1736,7 @@ def tbptt_split_batch(self, batch, split_size):
for i, x in enumerate(batch):
if isinstance(x, Tensor):
split_x = x[:, t : t + split_size]
elif isinstance(x, collections.Sequence):
elif isinstance(x, collections.abc.Sequence):
split_x = [None] * len(x)
for batch_idx in range(len(x)):
split_x[batch_idx] = x[batch_idx][t : t + split_size]
Expand Down

0 comments on commit 0fbfbf9

Please sign in to comment.