Add get_best_diff_method and _get_gradient_fn to qml.workflow #6399

Merged
merged 25 commits into master from add-best-diff-method on Oct 25, 2024
Commits
6ef91ba
added new fxns w/ tests and deprecated
andrijapau Oct 15, 2024
c7caff8
add PR number and link to changelog
andrijapau Oct 15, 2024
f583d4f
Merge branch 'master' into add-best-diff-method
andrijapau Oct 15, 2024
70f2c0e
Merge branch 'master' into add-best-diff-method
andrijapau Oct 17, 2024
51698ca
Merge branch 'master' into add-best-diff-method
andrijapau Oct 18, 2024
6b7359b
remove deprecations from this pr
andrijapau Oct 18, 2024
0bc47e6
update functions and tests
andrijapau Oct 18, 2024
ef0432d
update get_gradient_fn
andrijapau Oct 18, 2024
c06d7b7
fix doc string
andrijapau Oct 18, 2024
2191161
change test
andrijapau Oct 18, 2024
00250ea
change to always return str
andrijapau Oct 21, 2024
1610c1e
code clean up
andrijapau Oct 22, 2024
a121bbd
improve cov and get rid of finitediff
andrijapau Oct 22, 2024
02d2a17
improve cov
andrijapau Oct 22, 2024
c647bee
Merge branch 'master' into add-best-diff-method
andrijapau Oct 22, 2024
ae797d4
Merge branch 'master' into add-best-diff-method
andrijapau Oct 23, 2024
7112f32
use new construct_tape feature
andrijapau Oct 23, 2024
d4335e9
improve docstring
andrijapau Oct 23, 2024
8eb0c05
make get_gradient_fn private and improve docstring
andrijapau Oct 24, 2024
b8ccd78
update changelog
andrijapau Oct 24, 2024
87e13fa
Apply suggestions from code review
andrijapau Oct 24, 2024
a115491
fix spacing
andrijapau Oct 24, 2024
3764f97
code clean up
andrijapau Oct 24, 2024
2edc941
Merge branch 'master' into add-best-diff-method
andrijapau Oct 25, 2024
35ec3b2
retrigger stalled ci
andrijapau Oct 25, 2024
3 changes: 3 additions & 0 deletions doc/releases/changelog-dev.md
@@ -4,6 +4,9 @@

<h3>New features since last release</h3>

* Added functions `get_best_diff_method` and `_get_gradient_fn` to `qml.workflow`.
[(#6399)](https://github.com/PennyLaneAI/pennylane/pull/6399)

* Add `qml.workflow.construct_tape` as a method for users to construct single tapes from a `QNode`.
[(#6419)](https://github.com/PennyLaneAI/pennylane/pull/6419)

4 changes: 4 additions & 0 deletions pennylane/workflow/__init__.py
@@ -27,6 +27,8 @@
~workflow.construct_tape
~workflow.construct_batch
~workflow.get_transform_program
~workflow.get_best_diff_method
~workflow.get_gradient_fn

Supported interfaces
~~~~~~~~~~~~~~~~~~~~
@@ -55,6 +57,8 @@
.. include:: ../../pennylane/workflow/return_types_spec.rst

"""
from .get_best_diff_method import get_best_diff_method
from .get_gradient_fn import _get_gradient_fn
from .construct_batch import construct_batch, get_transform_program
from .construct_tape import construct_tape
from .execution import INTERFACE_MAP, SUPPORTED_INTERFACE_NAMES, execute
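For context, a quick sketch (an illustration, not part of the diff) of the resulting import surface — `get_best_diff_method` is exported publicly, while `_get_gradient_fn` keeps a leading underscore marking it private:

```python
import pennylane as qml

# Public helper, listed in the qml.workflow API docs.
from pennylane.workflow import get_best_diff_method

# Private helper: importable, but the leading underscore signals
# it is not part of the stable public API.
from pennylane.workflow import _get_gradient_fn
```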
78 changes: 78 additions & 0 deletions pennylane/workflow/get_best_diff_method.py
@@ -0,0 +1,78 @@
# Copyright 2018-2024 Xanadu Quantum Technologies Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains a function for getting the best differentiation method for a given QNode.

"""

from functools import wraps

import pennylane as qml
from pennylane.workflow.qnode import QNode, _make_execution_config


def get_best_diff_method(qnode: QNode):
"""Returns a function that computes the 'best' differentiation method
for a particular QNode.

This function prioritizes differentiation methods in the following order (SPSA-based and Hadamard-based gradients
are not included here):

* ``"device"``
* ``"backprop"``
* ``"parameter-shift"``

.. note::

The first differentiation method that is supported (from top to bottom)
will be returned. The order is designed to maximize efficiency, generality,
and stability.

.. seealso::

For a detailed comparison of the backpropagation and parameter-shift methods,
refer to the :doc:`quantum gradients with backpropagation example <demo:demos/tutorial_backprop>`.

Args:
qnode (.QNode): the QNode to get the 'best' differentiation method for.

Returns:
Callable: a wrapper that accepts the same arguments as the QNode and, when called, returns a ``str`` naming the 'best' differentiation method.
"""

def handle_return(transform):
"""Helper function to manage the return"""
if transform in (qml.gradients.param_shift, qml.gradients.param_shift_cv):
return "parameter-shift"
return transform

@wraps(qnode)
def wrapper(*args, **kwargs):
device = qnode.device
tape = qml.workflow.construct_tape(qnode)(*args, **kwargs)

config = _make_execution_config(None, "best")

if device.supports_derivatives(config, circuit=tape):
new_config = device.preprocess(config)[1]
transform = new_config.gradient_method
return handle_return(transform)

if tape and any(isinstance(o, qml.operation.CV) for o in tape):
transform = qml.gradients.param_shift_cv
return handle_return(transform)

transform = qml.gradients.param_shift
return handle_return(transform)

return wrapper
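A minimal usage sketch of `get_best_diff_method` (assuming `default.qubit`; the expected `"backprop"` result follows from this PR's own tests). The returned wrapper takes the same arguments as the QNode, since a tape must be constructed before the method can be chosen:

```python
import pennylane as qml
from pennylane.workflow import get_best_diff_method

dev = qml.device("default.qubit", wires=1)

@qml.qnode(dev)
def circuit(x):
    qml.RX(x, wires=0)
    return qml.expval(qml.PauliZ(0))

# get_best_diff_method returns a wrapper; calling it with the QNode's
# arguments constructs the tape and resolves the method.
best = get_best_diff_method(circuit)(0.5)
print(best)  # "backprop" on default.qubit, per the PR's tests
```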
92 changes: 92 additions & 0 deletions pennylane/workflow/get_gradient_fn.py
@@ -0,0 +1,92 @@
# Copyright 2018-2024 Xanadu Quantum Technologies Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains a function for retrieving the gradient function for a given device or tape.

"""

from typing import Optional, get_args

import pennylane as qml
from pennylane.transforms.core import TransformDispatcher
from pennylane.workflow.qnode import (
SupportedDeviceAPIs,
SupportedDiffMethods,
_make_execution_config,
)


# pylint: disable=too-many-return-statements, unsupported-binary-operation
def _get_gradient_fn(
device: SupportedDeviceAPIs,
diff_method: TransformDispatcher | SupportedDiffMethods = "best",
tape: Optional["qml.tape.QuantumTape"] = None,
):
"""Determines the differentiation method for a given device and diff method.

Args:
device (:class:`~.devices.Device`): PennyLane device
diff_method (str or :class:`~.TransformDispatcher`): The requested method of differentiation. Defaults to ``"best"``.
If a string, allowed options are ``"best"``, ``"backprop"``, ``"adjoint"``,
``"device"``, ``"parameter-shift"``, ``"hadamard"``, ``"finite-diff"``, or ``"spsa"``.
Alternatively, a gradient transform can be provided.
tape (Optional[.QuantumTape]): the circuit that will be differentiated. Should include shots information.

Returns:
str or :class:`~.TransformDispatcher`: the resolved ``gradient_fn``
"""

if diff_method is None:
return None

config = _make_execution_config(None, diff_method)

if device.supports_derivatives(config, circuit=tape):
new_config = device.preprocess(config)[1]
return new_config.gradient_method

if diff_method in {"backprop", "adjoint", "device"}: # device-only derivatives
raise qml.QuantumFunctionError(
f"Device {device} does not support {diff_method} with requested circuit."
)

if diff_method == "best":
qn = qml.QNode(lambda: None, device, diff_method=None)
return qml.workflow.get_best_diff_method(qn)()

if diff_method == "parameter-shift":
if tape and any(isinstance(o, qml.operation.CV) and o.name != "Identity" for o in tape):
return qml.gradients.param_shift_cv
return qml.gradients.param_shift

if diff_method == "finite-diff":
return qml.gradients.finite_diff

if diff_method == "spsa":
return qml.gradients.spsa_grad

if diff_method == "hadamard":
return qml.gradients.hadamard_grad

if isinstance(diff_method, str):
raise qml.QuantumFunctionError(
f"Differentiation method {diff_method} not recognized. Allowed "
f"options are {tuple(get_args(SupportedDiffMethods))}."
)

if isinstance(diff_method, qml.transforms.core.TransformDispatcher):
return diff_method

raise qml.QuantumFunctionError(
f"Differentiation method {diff_method} must be a gradient transform or a string."
)
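To illustrate the dispatch logic above, a hedged sketch (not part of the diff; `_get_gradient_fn` is private, so this is for understanding rather than public use):

```python
import pennylane as qml
from pennylane.workflow import _get_gradient_fn

dev = qml.device("default.qubit", wires=1)

# A string method the device cannot handle natively falls through
# to the corresponding gradient transform.
assert _get_gradient_fn(dev, diff_method="parameter-shift") is qml.gradients.param_shift

# A gradient transform passes through unchanged.
assert _get_gradient_fn(dev, diff_method=qml.gradients.spsa_grad) is qml.gradients.spsa_grad

# An unrecognized string raises a QuantumFunctionError.
try:
    _get_gradient_fn(dev, diff_method="not-a-method")
except qml.QuantumFunctionError as err:
    print(err)
```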
111 changes: 111 additions & 0 deletions tests/workflow/test_get_best_diff_method.py
@@ -0,0 +1,111 @@
# Copyright 2018-2024 Xanadu Quantum Technologies Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the `get_best_diff_method` function"""

import pytest

import pennylane as qml
from pennylane.workflow import get_best_diff_method


def dummy_cv_func(x):
"""A dummy CV function with continuous-variable operations."""
qml.Displacement(x, 0.1, wires=0)
return qml.expval(qml.X(0))


def dummyfunc():
"""dummy func."""
return None


# pylint: disable=unused-argument
class CustomDevice(qml.devices.Device):
"""A null device that just returns 0."""

def __repr__(self):
return "CustomDevice"

def execute(self, circuits, execution_config=None):
return (0,)


class CustomDeviceWithDiffMethod(qml.devices.Device):
"""A device that defines a derivative."""

def execute(self, circuits, execution_config=None):
return 0

def compute_derivatives(self, circuits, execution_config=None):
"""Device defines its own method to compute derivatives"""
return 0


class TestValidation:
"""Tests for QNode creation and validation"""

@pytest.mark.autograd
def test_best_method_is_device(self):
"""Test that the method for determining the best diff method
for a device that is a child of qml.devices.Device and has a
compute_derivatives method defined returns 'device'"""

dev = CustomDeviceWithDiffMethod()
qn_jax = qml.QNode(dummyfunc, dev, "jax")
qn_none = qml.QNode(dummyfunc, dev, None)

res = get_best_diff_method(qn_jax)()
assert res == "device"

res = get_best_diff_method(qn_none)()
assert res == "device"

@pytest.mark.parametrize("interface", ["jax", "tensorflow", "torch", "autograd"])
def test_best_method_is_backprop(self, interface):
"""Test that the method for determining the best diff method
for the default.qubit device and a valid interface returns back-propagation"""

dev = qml.device("default.qubit", wires=1)
qn = qml.QNode(dummyfunc, dev, interface)

# backprop is returned when the interface is an allowed interface for the device and Jacobian is not provided
res = get_best_diff_method(qn)()
assert res == "backprop"

def test_best_method_is_param_shift(self):
"""Test that the method for determining the best diff method
for a given device and interface returns the parameter shift rule if
'device' and 'backprop' don't work"""

# null device has no info - fall back on parameter-shift
dev = CustomDevice()
qn = qml.QNode(dummyfunc, dev)

res = get_best_diff_method(qn)()
assert res == "parameter-shift"

# no interface - fall back on parameter-shift
dev2 = qml.device("default.qubit", wires=1)
qn = qml.QNode(dummyfunc, dev2)
res2 = get_best_diff_method(qn)(shots=50)
assert res2 == "parameter-shift"

def test_best_method_is_param_shift_cv(self):
"""Tests that the method returns 'parameter-shift' when CV operations are in the QNode."""

dev = qml.device("default.gaussian", wires=1)
qn = qml.QNode(dummy_cv_func, dev, interface=None)

res = get_best_diff_method(qn)(0.5)
assert res == "parameter-shift"