[Typing][A-73,A-77] Add type annotations for some vision models (`AlexNet`, `LeNet`) (PaddlePaddle#65283)
DrRyanHuang authored and co63oc committed Jun 25, 2024
1 parent 999b547 commit ab58e7d
Showing 2 changed files with 45 additions and 16 deletions.
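
The change is most visible at the call site. Below is a minimal usage sketch (not part of this commit; the input shape and `num_classes` value are illustrative only) of what the annotated public entry point lets a static checker see.

```python
# Hypothetical usage sketch: after this commit, `alexnet` is annotated to
# return `AlexNet`, and its `**kwargs` accept only `num_classes: int`,
# so a checker such as mypy can validate the call below.
import paddle
from paddle.vision.models import alexnet

model = alexnet(pretrained=False, num_classes=10)  # checker sees an AlexNet
x = paddle.rand([1, 3, 224, 224])                  # NCHW input used in the docstring example
out = model(x)                                     # forward() is annotated Tensor -> Tensor
print(out.shape)                                   # [1, 10]
```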
48 changes: 34 additions & 14 deletions python/paddle/vision/models/alexnet.py
@@ -12,11 +12,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import math
from typing import (
    TYPE_CHECKING,
    TypedDict,
)

from typing_extensions import NotRequired, Unpack

import paddle
import paddle.nn.functional as F
from paddle import nn
from paddle._typing import Size2
from paddle.base.param_attr import ParamAttr
from paddle.nn import Conv2D, Dropout, Linear, MaxPool2D, ReLU
from paddle.nn.initializer import Uniform
@@ -31,19 +40,26 @@

__all__ = []

if TYPE_CHECKING:
    from paddle import Tensor


class _AlexNetOptions(TypedDict):
    num_classes: NotRequired[int]


class ConvPoolLayer(nn.Layer):
    def __init__(
        self,
        input_channels,
        output_channels,
        filter_size,
        stride,
        padding,
        stdv,
        groups=1,
        act=None,
    ):
        input_channels: int,
        output_channels: int,
        filter_size: Size2,
        stride: Size2,
        padding: Size2,
        stdv: float,
        groups: int = 1,
        act: str | None = None,
    ) -> None:
        super().__init__()

        self.relu = ReLU() if act == "relu" else None
@@ -60,7 +76,7 @@ def __init__(
        )
        self._pool = MaxPool2D(kernel_size=3, stride=2, padding=0)

    def forward(self, inputs):
    def forward(self, inputs: Tensor) -> Tensor:
        x = self._conv(inputs)
        if self.relu is not None:
            x = self.relu(x)
@@ -93,7 +109,7 @@ class AlexNet(nn.Layer):
            [1, 1000]
    """

    def __init__(self, num_classes=1000):
    def __init__(self, num_classes: int = 1000) -> None:
        super().__init__()
        self.num_classes = num_classes
        stdv = 1.0 / math.sqrt(3 * 11 * 11)
@@ -147,7 +163,7 @@ def __init__(self, num_classes=1000):
            bias_attr=ParamAttr(initializer=Uniform(-stdv, stdv)),
        )

    def forward(self, inputs):
    def forward(self, inputs: Tensor) -> Tensor:
        x = self._conv1(inputs)
        x = self._conv2(x)
        x = self._conv3(x)
@@ -169,7 +185,9 @@ def forward(self, inputs):
        return x


def _alexnet(arch, pretrained, **kwargs):
def _alexnet(
    arch: str, pretrained: bool, **kwargs: Unpack[_AlexNetOptions]
) -> AlexNet:
    model = AlexNet(**kwargs)

    if pretrained:
@@ -186,7 +204,9 @@ def _alexnet(arch, pretrained, **kwargs):
    return model


def alexnet(pretrained=False, **kwargs):
def alexnet(
    pretrained: bool = False, **kwargs: Unpack[_AlexNetOptions]
) -> AlexNet:
    """AlexNet model from
    `"ImageNet Classification with Deep Convolutional Neural Networks"
    <https://proceedings.neurips.cc/paper/2012/file/c399862d3b9d6b76c8436e924a68c45b-Paper.pdf>`_.
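
The `**kwargs` annotations above follow the PEP 692 pattern: a `TypedDict` with `NotRequired` keys describes the accepted keywords, and `Unpack` applies it to `**kwargs`. A small self-contained sketch of that pattern (the `_Options` and `build` names are hypothetical, not from the commit):

```python
from __future__ import annotations

from typing import TypedDict

from typing_extensions import NotRequired, Unpack


class _Options(TypedDict):
    num_classes: NotRequired[int]  # key may be omitted by the caller


def build(pretrained: bool = False, **kwargs: Unpack[_Options]) -> dict:
    # A type checker now knows the full set of allowed keywords and their types.
    return {"pretrained": pretrained, **kwargs}


build(num_classes=10)       # OK
# build(num_class=10)       # rejected by a checker: unknown keyword
# build(num_classes="10")   # rejected by a checker: wrong value type
```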
13 changes: 11 additions & 2 deletions python/paddle/vision/models/lenet.py
@@ -12,9 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

from typing import (
    TYPE_CHECKING,
)

import paddle
from paddle import nn

if TYPE_CHECKING:
    from paddle import Tensor

__all__ = []


@@ -44,7 +53,7 @@ class LeNet(nn.Layer):
            [1, 10]
    """

    def __init__(self, num_classes=10):
    def __init__(self, num_classes: int = 10) -> None:
        super().__init__()
        self.num_classes = num_classes
        self.features = nn.Sequential(
@@ -63,7 +72,7 @@ def __init__(self, num_classes=10):
                nn.Linear(84, num_classes),
            )

    def forward(self, inputs):
    def forward(self, inputs: Tensor) -> Tensor:
        x = self.features(inputs)

        if self.num_classes > 0:
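
Both files import `Tensor` only under `if TYPE_CHECKING:` and enable `from __future__ import annotations`, so the name is visible to checkers but never imported at runtime. A minimal sketch of the pattern (the `identity` function is a placeholder, not from the commit):

```python
from __future__ import annotations  # annotations are not evaluated at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only a type checker executes this import, so it cannot create an
    # import cycle or add runtime import cost.
    from paddle import Tensor


def identity(inputs: Tensor) -> Tensor:
    # At runtime the annotation is just the string "Tensor"; paddle does not
    # need to be imported for this module to load.
    return inputs
```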
