✨ complete scipy.optimize.basinhopping
jorenham committed Nov 28, 2024
1 parent 0b89e24 commit 1d0ea12
Showing 2 changed files with 84 additions and 20 deletions.
90 changes: 73 additions & 17 deletions scipy-stubs/optimize/_basinhopping.pyi
@@ -1,22 +1,78 @@
-from scipy._typing import Untyped
+from collections.abc import Callable, Mapping
+from typing import Any, Concatenate, Generic, Literal, Protocol, TypeAlias, overload, type_check_only
+from typing_extensions import TypeVar
 
+import numpy as np
+import optype.numpy as onp
+from scipy._typing import Seed
+from ._minimize import OptimizeResult as _OptimizeResult
+
 __all__ = ["basinhopping"]
 
+_FT = TypeVar("_FT", bound=onp.ToFloat | onp.ToFloatND)
+_FT_contra = TypeVar("_FT_contra", bound=onp.ToFloat | onp.ToFloatND, contravariant=True)
+_FT_co = TypeVar(
+    "_FT_co",
+    bound=float | np.floating[Any] | onp.ArrayND[np.floating[Any]],
+    default=float | np.float64 | onp.Array1D[np.float64],
+    covariant=True,
+)
+
+_CallbackFun: TypeAlias = Callable[[onp.Array1D[np.float64], _FT, bool], bool | None]
+
+@type_check_only
+class _AcceptTestFun(Protocol[_FT_contra]):
+    def __call__(
+        self,
+        /,
+        *,
+        f_new: _FT_contra,
+        x_new: onp.ToFloat1D,
+        f_old: _FT_contra,
+        x_old: onp.ToFloat1D,
+    ) -> onp.ToBool | Literal["force accept"]: ...
+
+@type_check_only
+class OptimizeResult(_OptimizeResult[_FT_co], Generic[_FT_co]):
+    lowest_optimization_result: _OptimizeResult[_FT_co]
+
+###
+
+@overload
+def basinhopping(
+    func: Callable[Concatenate[onp.Array1D[np.float64], ...], onp.ToFloat],
+    x0: onp.ToFloat1D,
+    niter: onp.ToJustInt = 100,
+    T: onp.ToFloat = 1.0,
+    stepsize: onp.ToFloat = 0.5,
+    minimizer_kwargs: Mapping[str, object] | None = None,
+    take_step: Callable[[onp.Array1D[np.float64]], onp.ToFloat] | None = None,
+    accept_test: _AcceptTestFun[onp.ToFloat] | None = None,
+    callback: _CallbackFun[float] | _CallbackFun[np.float64] | None = None,
+    interval: onp.ToJustInt = 50,
+    disp: onp.ToBool = False,
+    niter_success: onp.ToJustInt | None = None,
+    seed: Seed | None = None,
+    *,
+    target_accept_rate: onp.ToFloat = 0.5,
+    stepwise_factor: onp.ToFloat = 0.9,
+) -> OptimizeResult[float | np.float64]: ...
+@overload
 def basinhopping(
-    func: Untyped,
-    x0: Untyped,
-    niter: int = 100,
-    T: float = 1.0,
-    stepsize: float = 0.5,
-    minimizer_kwargs: Untyped | None = None,
-    take_step: Untyped | None = None,
-    accept_test: Untyped | None = None,
-    callback: Untyped | None = None,
-    interval: int = 50,
-    disp: bool = False,
-    niter_success: Untyped | None = None,
-    seed: Untyped | None = None,
+    func: Callable[Concatenate[onp.Array1D[np.float64], ...], onp.ToFloat1D],
+    x0: onp.ToFloat1D,
+    niter: onp.ToJustInt = 100,
+    T: onp.ToFloat = 1.0,
+    stepsize: onp.ToFloat = 0.5,
+    minimizer_kwargs: Mapping[str, object] | None = None,
+    take_step: Callable[[onp.Array1D[np.float64]], onp.ToFloat] | None = None,
+    accept_test: _AcceptTestFun[onp.ToFloat1D] | None = None,
+    callback: _CallbackFun[onp.Array1D[np.float64]] | None = None,
+    interval: onp.ToJustInt = 50,
+    disp: onp.ToBool = False,
+    niter_success: onp.ToJustInt | None = None,
+    seed: Seed | None = None,
     *,
-    target_accept_rate: float = 0.5,
-    stepwise_factor: float = 0.9,
-) -> Untyped: ...
+    target_accept_rate: onp.ToFloat = 0.5,
+    stepwise_factor: onp.ToFloat = 0.9,
+) -> OptimizeResult[onp.Array1D[np.float64]]: ...
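
For illustration, a minimal sketch of how the first overload above (scalar-valued `func`) is expected to apply. The objective and accept test below are made-up examples, and the reveal_type lines are expectations under these stubs, not verified type-checker output.

# Sketch: scalar objective, matching the first `basinhopping` overload above.
import numpy as np
import numpy.typing as npt
from scipy.optimize import basinhopping


def objective(x: npt.NDArray[np.float64]) -> float:
    # scalar objective: f(x) = (x[0] - 1)^2
    return float((x[0] - 1.0) ** 2)


def accept_test(*, f_new, x_new, f_old, x_old) -> bool:
    # keyword-only parameters, following the call convention `_AcceptTestFun` describes
    return bool(f_new <= f_old + 0.5)


res = basinhopping(objective, x0=[0.0], niter=25, accept_test=accept_test)
# expected: reveal_type(res)     -> OptimizeResult[float | np.float64]
# expected: reveal_type(res.fun) -> float | np.float64
print(res.x, res.fun)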
14 changes: 11 additions & 3 deletions scipy-stubs/optimize/_minimize.pyi
@@ -1,5 +1,6 @@
 from collections.abc import Callable, Mapping, Sequence
-from typing import Any, Concatenate, Final, Literal, Protocol, TypeAlias, TypedDict, overload, type_check_only
+from typing import Any, Concatenate, Final, Generic, Literal, Protocol, TypeAlias, TypedDict, overload, type_check_only
+from typing_extensions import TypeVar
 
 import numpy as np
 import optype.numpy as onp
@@ -114,9 +115,16 @@ class _OptimizeResult_scalar(_OptimizeResult):
     nit: int
     nfev: int
 
-class OptimizeResult(_OptimizeResult):
+_FunT_co = TypeVar(
+    "_FunT_co",
+    bound=float | np.floating[Any] | onp.ArrayND[np.floating[Any]],
+    default=float | np.float64,
+    covariant=True,
+)
+
+class OptimizeResult(_OptimizeResult, Generic[_FunT_co]):
     x: _Float1D
-    fun: float | np.float64
+    fun: _FunT_co
     jac: _Float1D  # requires `jac`
     hess: _Float2D  # requires `hess` or `hessp`
     hess_inv: _Float2D | LinearOperator  # requires `hess` or `hessp`, depends on solver
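
Taken together, the generic `OptimizeResult` here and the parametrized subclass in `_basinhopping.pyi` let the type of `fun` follow the objective. A small sketch of the intended static behavior; the reveal_type lines are expectations under these stubs, not verified type-checker output.

# Sketch: the `fun` field's type follows the objective's return type.
import numpy as np
import numpy.typing as npt
from scipy.optimize import basinhopping


def scalar_objective(x: npt.NDArray[np.float64]) -> float:
    return float(np.sum(x**2))


res = basinhopping(scalar_objective, x0=[0.5, -0.5], niter=10)
# expected: reveal_type(res.fun)                            -> float | np.float64
# expected: reveal_type(res.lowest_optimization_result.fun) -> float | np.float64
best = float(res.lowest_optimization_result.fun)
print(best)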
