[Typing][C-50] Add type annotations for python/paddle/distributed/fleet/utils/ps_util.py #66770

Merged 2 commits on Jul 31, 2024
Changes from 1 commit
25 changes: 21 additions & 4 deletions python/paddle/distributed/fleet/utils/ps_util.py
@@ -13,11 +13,20 @@
# limitations under the License.
"""Parameter Server utils"""

from __future__ import annotations

import os
import warnings
from typing import TYPE_CHECKING

import paddle

if TYPE_CHECKING:
from paddle import Tensor
from paddle.base import Executor
Review comment (Member):
Suggested change
- from paddle.base import Executor
+ from paddle.static import Executor

from paddle.distributed.fleet.base.role_maker import RoleMakerBase
from paddle.framework import Program
Review comment (Member):
Suggested change
- from paddle.framework import Program
+ from paddle.static import Program

paddle.framework.Program is not a public API, and it behaves slightly differently at runtime. For example, under PIR mode paddle.framework.Program is not patched to the PIR Program, although a type checker could not detect that kind of patching anyway...
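
Taken together, the two suggestions would have the type-only imports come from the public paddle.static namespace. A sketch of how the TYPE_CHECKING block would read with the reviewer's proposal applied (not necessarily the exact merged code):

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Type-only imports; resolved by the type checker, never executed at runtime.
    from paddle import Tensor
    from paddle.distributed.fleet.base.role_maker import RoleMakerBase
    from paddle.static import Executor, Program  # public API, per the review suggestions
```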


__all__ = []


@@ -26,7 +35,11 @@ class DistributedInfer:
Utility class for distributed infer of PaddlePaddle.
"""

def __init__(self, main_program=None, startup_program=None):
def __init__(
self,
main_program: Program | None = None,
startup_program: Program | None = None,
) -> None:
if main_program:
self.origin_main_program = main_program.clone()
else:
@@ -43,8 +56,12 @@ def __init__(self, main_program=None, startup_program=None):
self.sparse_table_maps = None

def init_distributed_infer_env(
self, exe, loss, role_maker=None, dirname=None
):
self,
exe: Executor,
loss: Tensor,
role_maker: RoleMakerBase | None = None,
dirname: str | None = None,
) -> None:
from paddle.distributed import fleet

if fleet.fleet._runtime_handle is None:
@@ -112,7 +129,7 @@ def _init_dense_params(self, exe=None, dirname=None):
vars=need_load_vars,
)

def get_dist_infer_program(self):
def get_dist_infer_program(self) -> Program:
varname2tables = self._get_sparse_table_map()
convert_program = self._convert_program(
self.origin_main_program, varname2tables
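
For reference, a minimal call-site sketch of how the newly annotated API reads under a static type checker. The program objects and save path below are hypothetical placeholders, and the distributed calls are only shown in comments because they require a running fleet/parameter-server environment:

```python
from __future__ import annotations

import paddle
from paddle.distributed.fleet.utils.ps_util import DistributedInfer

paddle.enable_static()

# Program | None parameters: pass explicit Programs, or omit them to use defaults.
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
infer = DistributedInfer(main_program=main_prog, startup_program=startup_prog)

# init_distributed_infer_env(exe: Executor, loss: Tensor, ...) and
# get_dist_infer_program() -> Program need an initialized fleet runtime,
# so they are sketched here rather than executed:
# exe = paddle.static.Executor(paddle.CPUPlace())
# infer.init_distributed_infer_env(exe, loss, dirname="/path/to/saved_params")
# dist_prog: paddle.static.Program = infer.get_dist_infer_program()
```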