Implementation of pennylane optimizers #101

Merged

42 commits
1c208eb
implement some optimization methods from pennylane
raulconchello Oct 5, 2022
90e68eb
adding the spsa pennylane optimizer
raulconchello Oct 6, 2022
9d5439a
Documentation for pennylane optimizers
raulconchello Oct 12, 2022
a331046
Documentation for pennylane optimizers
raulconchello Oct 12, 2022
3054af6
Added pennylane dependency in setup.py
raulconchello Oct 12, 2022
985674a
Added a test for pennylane optimizers
raulconchello Oct 12, 2022
7e2a022
Documentation for pennylane optimizers test
raulconchello Oct 12, 2022
27b0ef7
The proposed changes have been modified
raulconchello Oct 13, 2022
ed96166
codecov added
raulconchello Oct 13, 2022
ed9bdb6
Merge branch 'dev' into dev_PennyLane_optimization_methods
raulconchello Oct 19, 2022
9608b5e
Merge branch 'dev' into dev_PennyLane_optimization_methods
raulconchello Oct 20, 2022
115b576
Documentation updated
raulconchello Oct 21, 2022
828a787
Solving bug in optimizers pennylane tests
raulconchello Oct 28, 2022
1394b96
Documentation
raulconchello Oct 28, 2022
f870c2f
Add custom optimizers in ALLOWED_MINIMIZATION_METHODS
raulconchello Oct 28, 2022
a5ebaff
Change in step computation depending on the method
raulconchello Oct 31, 2022
10e999b
Merge branch 'dev' into dev_PennyLane_optimization_methods
raulconchello Nov 1, 2022
eb3a47e
Making training_vqa (for pennylane) more readable
Nov 8, 2022
1e714c7
Better tests for the pennylane optimizers
Nov 8, 2022
3724e2f
Requirements
Nov 8, 2022
a1f6118
Merge branch 'dev' into dev_PennyLane_optimization_methods
Nov 8, 2022
5e04b2a
Adding CustomScipyPennyLaneOptimizer
Nov 14, 2022
6d55818
Debugging
Nov 15, 2022
1ce7470
PennyLaneOptimizer
Nov 15, 2022
2e3020e
Merge branch 'dev' into dev_PennyLane_optimization_methods
Nov 15, 2022
3a6d320
Creating PennyLane folder
Nov 15, 2022
5557073
Removing the PennyLane requirement
Nov 16, 2022
4473e27
Deleting some prints
Nov 16, 2022
d66524d
Added NOTICE, it specifies code is from PennyLane
Nov 16, 2022
24a64dd
Requirements.txt updated
Nov 16, 2022
3b196fb
Docs optimizers
Nov 16, 2022
c3a7fb7
Debugging
Nov 16, 2022
88a6ac5
Debugging docs
Nov 16, 2022
97f7aae
Debugging
Nov 16, 2022
ce791ee
Merge branch 'dev' into dev_PennyLane_optimization_methods
Nov 16, 2022
98dde82
except Exception as e. In PennyLane opt
Nov 16, 2022
d164b9c
Requirements with >=
Nov 18, 2022
2aebedb
Documentation and copyright
Nov 18, 2022
7f8c55d
Cleaning imports of optimization_methods_pennylane
Nov 18, 2022
d478b53
Documentation
Nov 18, 2022
4fa48d3
Requirements -> autoray>=0.3.1
Nov 18, 2022
9b76633
Requirements -> autoray>=0.3.1
Nov 18, 2022
141 changes: 141 additions & 0 deletions openqaoa/optimizers/optimization_methods_pennylane.py
@@ -0,0 +1,141 @@


import inspect

import numpy as np
import pennylane as pl
from scipy.optimize import OptimizeResult

def pennylane_optimizer(fun, x0, args=(), maxfev=None, method='vgd', qfim=None,
                        maxiter=100, tol=1e-6, jac=None, callback=None,
                        nums_frequency=None, spectra=None, shifts=None, **options):

    '''
    Minimize a function `fun` using one of the PennyLane optimizers.
    For the available methods, see the `available_methods_dict` variable below, and read
    https://docs.pennylane.ai/en/stable/introduction/interfaces.html#optimizers

    PARAMETERS
    ----------
    fun : callable
        Function to minimize.
    x0 : ndarray
        Initial guess.
    args : sequence, optional
        Arguments to pass to `fun`.
    maxfev : int, optional
        Maximum number of function evaluations.
    method : string, optional
        Optimizer method used to compute the steps.
    qfim : callable, optional (required for natural_grad_descent)
        Callable Fubini-Study metric tensor.
    maxiter : int, optional
        Maximum number of iterations.
    tol : float
        Tolerance before the optimizer terminates; if the difference in cost between
        two consecutive steps is smaller than `tol`, optimization stops.
    jac : callable, optional (required for all methods but rotosolve and spsa)
        Callable gradient function.
    callback : callable, optional
        Called after each iteration, as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    options : dict, optional
        Dictionary whose keys are arguments of the optimizer object and whose values
        are the values to pass to those arguments. For all the possible arguments, read
        https://docs.pennylane.ai/en/stable/introduction/interfaces.html#optimizers.

    The following three arguments are used by rotosolve only (read
    https://docs.pennylane.ai/en/stable/code/api/pennylane.RotosolveOptimizer.html#pennylane.RotosolveOptimizer.step):

    nums_frequency : dict[dict], required for rotosolve
        The number of frequencies in `fun` per parameter.
    spectra : dict[dict], required for rotosolve
        Frequency spectra in the objective function per parameter.
    shifts : dict[dict], required for rotosolve
        Shift angles for the reconstruction per parameter.

    RETURNS
    -------
    OptimizeResult : OptimizeResult
        Scipy OptimizeResult object.
    '''

    def cost(params, *k):  # wrapper that converts the params array from pennylane to numpy
        return fun(np.array(params), *k)

    available_methods_dict = {  # optimizers implemented
        'adagrad': pl.AdagradOptimizer,
        'adam': pl.AdamOptimizer,
        'vgd': pl.GradientDescentOptimizer,
        'momentum': pl.MomentumOptimizer,
        'nesterov_momentum': pl.NesterovMomentumOptimizer,
        'natural_grad_descent': pl.QNGOptimizer,
        'rmsprop': pl.RMSPropOptimizer,
        'rotosolve': pl.RotosolveOptimizer,
        'spsa': pl.SPSAOptimizer,
    }

    optimizer = available_methods_dict[method]  # select the optimizer class

    # get the optimizer's constructor arguments
    arguments = inspect.signature(optimizer).parameters.keys()
    options_keys = list(options.keys())

    # keep only the options that the optimizer accepts (pop the others)
    for key in options_keys:
        if key not in arguments:
            options.pop(key)
    if 'maxiter' in arguments:
        options['maxiter'] = maxiter

    optimizer = optimizer(**options)  # instantiate the optimizer with the filtered options

    bestx = pl.numpy.array(x0, requires_grad=True)
    besty = cost(x0, *args)
    funcalls = 1  # tracks the number of function evaluations
    niter = 0
    improved = True
    stop = False

    testx = np.copy(bestx)
    testy = np.real(besty)
    while improved and not stop and niter < maxiter:
        improved = False
        niter += 1

        # compute one step, dispatching on the method's requirements
        if qfim:  # natural_grad_descent
            testx, testy = optimizer.step_and_cost(cost, bestx, *args, grad_fn=jac, metric_tensor_fn=qfim)
        elif jac:  # adagrad, adam, vgd, momentum, nesterov_momentum, rmsprop
            testx, testy = optimizer.step_and_cost(cost, bestx, *args, grad_fn=jac)
        elif method == 'rotosolve':
            testx, testy = optimizer.step_and_cost(
                cost, bestx, *args,
                # default to one frequency per parameter if `nums_frequency` is not given
                nums_frequency={'params': {(i,): 1 for i in range(bestx.size)}} if not nums_frequency else nums_frequency,
                spectra=spectra,
                shifts=shifts,
                full_output=False,
            )
        else:  # spsa
            testx, testy = optimizer.step_and_cost(cost, bestx, *args)
        funcalls += 1  # each step evaluates the cost at least once

        # stop if the cost has stabilized; otherwise accept the step
        if np.abs(besty - testy) < tol and niter > 2:
            improved = False
        else:
            besty = testy
            bestx = testx
            improved = True

        if callback is not None:
            callback(bestx)
        if maxfev is not None and funcalls >= maxfev:
            stop = True
            break

    return OptimizeResult(fun=besty, x=np.array(bestx), nit=niter,
                          nfev=funcalls, success=(niter > 1))
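Below is a minimal usage sketch, not part of the PR, showing how `pennylane_optimizer` can be called directly on a toy problem; the quadratic cost, its gradient, and the `stepsize` value are illustrative stand-ins (`stepsize` is an `AdamOptimizer` constructor argument, so it survives the options filtering above):

import numpy as np

def quadratic(x):  # toy cost with minimum at x = (1, 1, 1)
    return np.sum((x - 1.0) ** 2)

def quadratic_grad(x):  # analytic gradient, passed as `jac`
    return 2 * (x - 1.0)

result = pennylane_optimizer(quadratic, x0=np.zeros(3), method='adam',
                             jac=quadratic_grad, maxiter=200, stepsize=0.1)
print(result.x, result.fun)  # should approach [1. 1. 1.] and 0.0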


20 changes: 19 additions & 1 deletion openqaoa/optimizers/training_vqa.py
@@ -26,6 +26,7 @@
from ..basebackend import VQABaseBackend
from ..qaoa_parameters.baseparams import QAOAVariationalBaseParams
from . import optimization_methods as om
from . import optimization_methods_pennylane as ompl

from .logger_vqa import Logger
from .result import Result
@@ -469,10 +470,16 @@ class CustomScipyGradientOptimizer(OptimizeVQA):
* optimizer_options

* Dictionary of optimiser-specific arguments, defaults to ``None``
* Also used for the arguments of the pennylane optimizers (and their step functions)

"""
    CUSTOM_GRADIENT_OPTIMIZERS = ['vgd', 'newton',
                                  'rmsprop', 'natural_grad_descent', 'spsa',
                                  'pennylane',
                                  'pennylane adagrad', 'pennylane adam', 'pennylane vgd',
                                  'pennylane momentum', 'pennylane nesterov_momentum',
                                  'pennylane natural_grad_descent', 'pennylane rmsprop',
                                  'pennylane rotosolve', 'pennylane spsa']

    def __init__(self,
                 vqa_object: Type[VQABaseBackend],

@@ -562,6 +569,7 @@ def optimize(self):
        :
            The optimized return object from the ``scipy.optimize`` package; the result is assigned to the attribute ``opt_result``
        '''

        if self.method == 'vgd':
            method = om.grad_descent
        elif self.method == 'newton':
@@ -575,6 +583,16 @@
        elif self.method == 'spsa':
            print("Warning : SPSA is an experimental feature.")
            method = om.SPSA
        elif self.method.lower().split()[0] == 'pennylane':  # check if we are using a pennylane optimizer
            method = ompl.pennylane_optimizer

            if len(self.method.split()) > 1:  # check if we are not using the default method (vgd)
                self.options['method'] = self.method.lower().split()[1]

            # .get avoids a KeyError when the bare 'pennylane' method is used
            if self.options.get('method') == 'natural_grad_descent':
                self.options['qfim'] = qfim(self.vqa_object, self.variational_params, self.log)
            elif self.options.get('method') in ['spsa', 'rotosolve']:
                self.jac = None

        try:
            if self.hess == None:
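As a hedged illustration (not part of the diff) of how this branch is reached from the workflow layer, mirroring the test file below; the `stepsize` option is assumed to be forwarded to the underlying PennyLane optimizer via the options filtering in `pennylane_optimizer`:

from openqaoa.workflows.optimizer import QAOA

q = QAOA()
# 'pennylane adam' splits on whitespace: 'pennylane' selects
# ompl.pennylane_optimizer and 'adam' becomes options['method'].
q.set_classical_optimizer(method='pennylane adam',
                          jac='finite_difference',
                          maxiter=100,
                          optimizer_options={'stepsize': 0.05})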
3 changes: 2 additions & 1 deletion setup.py
@@ -17,7 +17,8 @@
"matplotlib>=3.4.3, <3.5.0",
"qiskit>=0.36.1",
"pyquil>=3.1.0",
"docplex>=2.23.1"
"docplex>=2.23.1",
"pennylane>=0.26.0"
]

requirements_docs = [
50 changes: 50 additions & 0 deletions tests/test_optimizers_pennylane.py
@@ -0,0 +1,50 @@
import warnings
import unittest

import networkx as nx
from openqaoa.workflows.optimizer import QAOA
from openqaoa.devices import create_device
from openqaoa.problems.problem import MaximumCut
from openqaoa.optimizers.training_vqa import CustomScipyGradientOptimizer


# create a problem
nodes = 4
edge_probability = 0.6
g = nx.generators.fast_gnp_random_graph(n=nodes, p=edge_probability)
maxcut_prob = MaximumCut(g)
maxcut_qubo = maxcut_prob.get_qubo_problem()


class TestPennylaneOptimizers(unittest.TestCase):

    def _run_method(self, method):
        " function to run the test for any method "
        q = QAOA()
        device = create_device(location='local', name='qiskit.statevector_simulator')
        q.set_device(device)

        q.set_circuit_properties(p=2, param_type='standard', init_type='rand', mixer_hamiltonian='x')
        q.set_backend_properties(prepend_state=None, append_state=None)
        q.set_classical_optimizer(method=method, maxiter=4, optimizer_options={'blocking': False, 'resamplings': 0},
                                  optimization_progress=True, cost_progress=True, parameter_log=True, jac='finite_difference')

        q.compile(maxcut_qubo)
        q.optimize()

    def test_pennylane_optimizers(self):
        " function to run the tests for the pennylane optimizers "
        list_optimizers = CustomScipyGradientOptimizer.CUSTOM_GRADIENT_OPTIMIZERS

        for opt in list_optimizers:
            if opt.split()[0] == "pennylane":
                self._run_method(opt)


if __name__ == "__main__":
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', category=PendingDeprecationWarning)
        unittest.main()
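For a quick local check, the suite can also be driven programmatically; a minimal sketch using the test class defined above:

# run only the PennyLane optimizer tests, with verbose output
suite = unittest.TestLoader().loadTestsFromTestCase(TestPennylaneOptimizers)
unittest.TextTestRunner(verbosity=2).run(suite)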