Skip to content

Commit

Permalink
Use a double-precision-appropriate epsilon for the finite-difference approximation
Browse files Browse the repository at this point in the history
  • Loading branch information
dominikandreasseitz committed Nov 7, 2023
1 parent 956cc6a commit c2e78ae
Showing 1 changed file with 2 additions and 1 deletion.
3 changes: 2 additions & 1 deletion qadence/backends/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

from qadence.utils import Endianness, int_to_basis

FINITE_DIFF_EPS = 1e-06
# Dict of NumPy dtype -> torch dtype (when the correspondence exists)
numpy_to_torch_dtype_dict = {
np.bool_: torch.bool,
Expand Down Expand Up @@ -93,7 +94,7 @@ def to_list_of_dicts(param_values: dict[str, Tensor]) -> list[dict[str, float]]:
return [{k: v[i] for k, v in batched_values.items()} for i in range(max_batch_size)]


def finitediff(f: Callable, x: torch.Tensor, eps: float = 1e-4) -> torch.Tensor:
def finitediff(f: Callable, x: torch.Tensor, eps: float = FINITE_DIFF_EPS) -> torch.Tensor:
    """Approximate the derivative of ``f`` at ``x`` via a symmetric (central) difference.

    Evaluates ``f`` at ``x + eps`` and ``x - eps`` and divides the difference of the
    results by the step width ``2 * eps``.
    """
    forward = f(x + eps)
    backward = f(x - eps)
    step = 2 * eps
    return (forward - backward) / step  # type: ignore


Expand Down

0 comments on commit c2e78ae

Please sign in to comment.