Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[python-package] fix mypy errors in sklearn.py #4837

Merged
merged 4 commits into from
Dec 2, 2021
Merged
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions python-package/lightgbm/sklearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@
"""Scikit-learn wrapper interface for LightGBM."""
import copy
from inspect import signature
from typing import Callable, Dict, List, Optional, Tuple, Union
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import numpy as np

from .basic import Dataset, LightGBMError, _ArrayLike, _choose_param_value, _ConfigAliases, _log_warning
from .basic import Booster, Dataset, LightGBMError, _ArrayLike, _choose_param_value, _ConfigAliases, _log_warning
from .callback import log_evaluation, record_evaluation
from .compat import (SKLEARN_INSTALLED, LGBMNotFittedError, _LGBMAssertAllFinite, _LGBMCheckArray,
_LGBMCheckClassificationTargets, _LGBMCheckSampleWeight, _LGBMCheckXY, _LGBMClassifierBase,
Expand Down Expand Up @@ -514,11 +514,11 @@ def __init__(
self.random_state = random_state
self.n_jobs = n_jobs
self.importance_type = importance_type
self._Booster = None
self._Booster: Optional[Booster] = None
self._evals_result = None
self._best_score = None
self._best_iteration = None
self._other_params = {}
self._other_params: Dict[str, Any] = {}
self._objective = objective
self.class_weight = class_weight
self._class_weight = None
Expand Down Expand Up @@ -855,7 +855,7 @@ def n_estimators_(self) -> int:
"""
if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No n_estimators found. Need to call fit beforehand.')
return self._Booster.current_iteration()
return self._Booster.current_iteration() # type: ignore

@property
def n_iter_(self) -> int:
Expand All @@ -866,7 +866,7 @@ def n_iter_(self) -> int:
"""
if not self.__sklearn_is_fitted__():
raise LGBMNotFittedError('No n_iter found. Need to call fit beforehand.')
return self._Booster.current_iteration()
return self._Booster.current_iteration() # type: ignore

@property
def booster_(self):
Expand Down Expand Up @@ -920,7 +920,7 @@ def fit(self, X, y,
categorical_feature=categorical_feature, callbacks=callbacks, init_model=init_model)
return self

_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMRegressor")
_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMRegressor") # type: ignore
_base_doc = (_base_doc[:_base_doc.find('group :')] # type: ignore
+ _base_doc[_base_doc.find('eval_set :'):]) # type: ignore
_base_doc = (_base_doc[:_base_doc.find('eval_class_weight :')]
Expand Down Expand Up @@ -987,7 +987,7 @@ def fit(self, X, y,
callbacks=callbacks, init_model=init_model)
return self

_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMClassifier")
_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMClassifier") # type: ignore
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Warnings about .__doc__ being Optional[str] could be avoided by changing this to something like getattr(LGBMModel.fit, "__doc__", ""), but I think the current form is preferable: it makes .replace() raise an exception in CI if a change accidentally leaves a method's docs empty.

_base_doc = (_base_doc[:_base_doc.find('group :')] # type: ignore
+ _base_doc[_base_doc.find('eval_set :'):]) # type: ignore
fit.__doc__ = (_base_doc[:_base_doc.find('eval_group :')]
Expand Down Expand Up @@ -1086,7 +1086,7 @@ def fit(self, X, y,
categorical_feature=categorical_feature, callbacks=callbacks, init_model=init_model)
return self

_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMRanker")
_base_doc = LGBMModel.fit.__doc__.replace("self : LGBMModel", "self : LGBMRanker") # type: ignore
fit.__doc__ = (_base_doc[:_base_doc.find('eval_class_weight :')] # type: ignore
+ _base_doc[_base_doc.find('eval_init_score :'):]) # type: ignore
_base_doc = fit.__doc__
Expand Down