Commit
Remove deprecated methods/parameters from algorithms and opflow (Qiskit#5777)

* Remove deprecated methods/parameters from algorithms and opflow

* Revert grover change as it is done in another PR

Co-authored-by: Julien Gacon <[email protected]>
manoelmarques and Cryoris authored Feb 3, 2021
1 parent 84814b5 commit 7cbee4d
Showing 3 changed files with 4 additions and 37 deletions.
17 changes: 1 addition & 16 deletions qiskit/algorithms/optimizers/aqgd.py
@@ -1,6 +1,6 @@
 # This code is part of Qiskit.
 #
-# (C) Copyright IBM 2019, 2020.
+# (C) Copyright IBM 2019, 2021.
 #
 # This code is licensed under the Apache License, Version 2.0. You may
 # obtain a copy of this license in the LICENSE.txt file in the root directory
@@ -13,7 +13,6 @@
 """Analytical Quantum Gradient Descent (AQGD) optimizer."""
 
 import logging
-import warnings
 from typing import Callable, Tuple, List, Dict, Union
 
 import numpy as np
@@ -50,7 +49,6 @@ def __init__(self,
                  maxiter: Union[int, List[int]] = 1000,
                  eta: Union[float, List[float]] = 1.0,
                  tol: float = 1e-6,
-                 disp: bool = False,
                  momentum: Union[float, List[float]] = 0.25,
                  param_tol: float = 1e-6,
                  averaging: int = 10) -> None:
@@ -64,7 +62,6 @@
             tol: Tolerance for change in windowed average of objective values.
                 Convergence occurs when either objective tolerance is met OR parameter
                 tolerance is met.
-            disp: Set to True to display convergence messages.
             momentum: Bias towards the previous gradient momentum in current
                 update. Must be within the bounds: [0,1)
             param_tol: Tolerance for change in norm of parameters.
@@ -93,12 +90,6 @@ def __init__(self,
         self._param_tol = param_tol
         self._tol = tol
         self._averaging = averaging
-        if disp:
-            warnings.warn('The disp parameter is deprecated as of '
-                          '0.8.0 and will be removed no sooner than 3 months after the release. '
-                          'The information is now available if you enable INFO level logging.',
-                          DeprecationWarning, stacklevel=2)
-        self._disp = disp
 
         # state
         self._avg_objval = None
@@ -299,9 +290,6 @@ def optimize(self,
         converged = False
         for (eta, mom_coeff) in zip(self._eta, self._momenta_coeff):
             logger.info("Epoch: %4d | Stepsize: %6.4f | Momentum: %6.4f", epoch, eta, mom_coeff)
-            if self._disp:
-                print("Epoch: {:4d} | Stepsize: {:6.4f} | Momentum: {:6.4f}"
-                      .format(epoch, eta, mom_coeff))
 
             sum_max_iters = sum(self._maxiter[0:epoch + 1])
             while iter_count < sum_max_iters:
@@ -323,9 +311,6 @@ def optimize(self,
 
                 logger.info(" Iter: %4d | Obj: %11.6f | Grad Norm: %f",
                             iter_count, objval, np.linalg.norm(gradient, ord=np.inf))
-                if self._disp:
-                    print(" Iter: {:4d} | Obj: {:11.6f} | Grad Norm: {:f}"
-                          .format(iter_count, objval, np.linalg.norm(gradient, ord=np.inf)))
 
                 # Check for objective convergence
                 converged = self._converged_objective(objval, self._tol, self._averaging)
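
The removed disp flag only mirrored information that AQGD already emits through the standard logging module, as the deprecation message noted. Below is a minimal migration sketch, assuming the qiskit.algorithms.optimizers import path implied by the file locations above; the logging configuration is illustrative and not part of this commit:

```python
import logging

from qiskit.algorithms.optimizers import AQGD

# Instead of disp=True, surface the per-epoch and per-iteration records that
# the optimizer now emits at INFO level via its module logger.
logging.basicConfig(level=logging.INFO)

# Constructor call without the removed disp flag; the remaining arguments and
# defaults match the diff above.
optimizer = AQGD(maxiter=1000, eta=1.0, tol=1e-6, momentum=0.25)
```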
11 changes: 1 addition & 10 deletions qiskit/algorithms/optimizers/gsls.py
@@ -12,7 +12,6 @@
 
 """Line search with Gaussian-smoothed samples on a sphere."""
 
-import warnings
 from typing import Dict, Optional, Tuple, List, Callable
 import logging
 import numpy as np
@@ -48,8 +47,7 @@ def __init__(self,
                  step_size_multiplier: float = 0.4,
                  armijo_parameter: float = 1.0e-1,
                  min_gradient_norm: float = 1e-8,
-                 max_failed_rejection_sampling: int = 50,
-                 max_iter: Optional[int] = None) -> None:
+                 max_failed_rejection_sampling: int = 50) -> None:
         """
         Args:
             maxiter: Maximum number of iterations.
@@ -67,15 +65,8 @@
             min_gradient_norm: If the gradient norm is below this threshold, the algorithm stops.
             max_failed_rejection_sampling: Maximum number of attempts to sample points within
                 bounds.
-            max_iter: Deprecated, use maxiter.
         """
         super().__init__()
-        if max_iter is not None:
-            warnings.warn('The max_iter parameter is deprecated as of '
-                          '0.8.0 and will be removed no sooner than 3 months after the release. '
-                          'You should use maxiter instead.',
-                          DeprecationWarning)
-            maxiter = max_iter
         for k, v in list(locals().items()):
             if k in self._OPTIONS:
                 self._options[k] = v
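
Callers that still pass max_iter will now get a TypeError; the keyword maps one-to-one onto maxiter. A short before/after sketch, where the import path follows the file location above and the iteration count is chosen for illustration:

```python
from qiskit.algorithms.optimizers import GSLS

# Before this commit, max_iter was accepted with a DeprecationWarning and
# forwarded to maxiter:
#     optimizer = GSLS(max_iter=100)

# After this commit, only the maxiter keyword exists:
optimizer = GSLS(maxiter=100)
```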
13 changes: 2 additions & 11 deletions qiskit/algorithms/optimizers/spsa.py
@@ -12,8 +12,7 @@
 
 """Simultaneous Perturbation Stochastic Approximation optimizer."""
 
-import warnings
-from typing import Optional, List, Callable
+from typing import List, Callable
 import logging
 
 import numpy as np
@@ -70,8 +69,7 @@ def __init__(self,
                  c2: float = 0.602,
                  c3: float = 0.101,
                  c4: float = 0,
-                 skip_calibration: bool = False,
-                 max_trials: Optional[int] = None) -> None:
+                 skip_calibration: bool = False) -> None:
         """
         Args:
             maxiter: Maximum number of iterations to perform.
@@ -84,17 +82,10 @@
             c3: The gamma in the paper, and it is used to adjust c (c1) at each iteration.
             c4: The parameter used to control a as well.
             skip_calibration: Skip calibration and use provided c(s) as is.
-            max_trials: Deprecated, use maxiter.
         """
         validate_min('save_steps', save_steps, 1)
         validate_min('last_avg', last_avg, 1)
         super().__init__()
-        if max_trials is not None:
-            warnings.warn('The max_trials parameter is deprecated as of '
-                          '0.8.0 and will be removed no sooner than 3 months after the release. '
-                          'You should use maxiter instead.',
-                          DeprecationWarning)
-            maxiter = max_trials
         for k, v in list(locals().items()):
             if k in self._OPTIONS:
                 self._options[k] = v
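
The same rename applies here: max_trials becomes maxiter with identical meaning. A sketch under the same import-path assumption as above, with an illustrative iteration count:

```python
from qiskit.algorithms.optimizers import SPSA

# Before this commit, max_trials was accepted with a DeprecationWarning and
# forwarded to maxiter:
#     optimizer = SPSA(max_trials=1000)

# After this commit, only maxiter remains:
optimizer = SPSA(maxiter=1000)
```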

0 comments on commit 7cbee4d
