Implement unitary AQC
garrison committed Nov 27, 2024
1 parent 133fd2d commit fcbe4ef
Showing 2 changed files with 73 additions and 1 deletion.
38 changes: 38 additions & 0 deletions qiskit_addon_aqc_tensor/objective.py
@@ -85,7 +85,45 @@ def target(self) -> TensorNetworkState:
        return self._target_tensornetwork


class MaximizeProcessFidelity:
    """Maximize process fidelity."""

    def __init__(self, target, ansatz: QuantumCircuit, settings: TensorNetworkSimulationSettings):
        """Initialize the objective function.

        Args:
            target: Target state in tensor-network representation.
            ansatz: Parametrized ansatz circuit.
            settings: Tensor network simulation settings.
        """
        if ansatz is not None:
            from .ansatz_generation import AnsatzBlock

            # Expand any AnsatzBlock instructions into their constituent gates.
            ansatz = ansatz.decompose(AnsatzBlock)
        self._ansatz = ansatz
        self._simulation_settings = settings
        self._target_tensornetwork = target
        if settings is not None:
            from .simulation.abstract import _preprocess_for_gradient

            self._preprocessed = _preprocess_for_gradient(self, settings)

    def loss_function(self, x: np.ndarray) -> tuple[float, np.ndarray]:
        """Evaluate ``(objective_value, gradient)`` of the objective function at point ``x``."""
        from .simulation.abstract import _compute_objective_and_gradient

        return _compute_objective_and_gradient(
            self, self._simulation_settings, self._preprocessed, x
        )

    @property
    def target(self) -> TensorNetworkState:
        """Target tensor network."""
        return self._target_tensornetwork


# Reminder: update the RST file in docs/apidocs when adding new interfaces.
__all__ = [
    "OneMinusFidelity",
    "MaximizeProcessFidelity",
]
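
A minimal usage sketch (not part of the commit), assuming ``target``, ``ansatz``, and ``settings`` have already been prepared as in ``__init__`` above; since ``loss_function`` returns ``(objective_value, gradient)``, it can be handed directly to ``scipy.optimize.minimize`` with ``jac=True``:

# Hypothetical usage sketch; `target`, `ansatz`, and `settings` stand in for the
# caller's target tensor network, parametrized ansatz circuit, and chosen
# TensorNetworkSimulationSettings instance.
import numpy as np
from scipy.optimize import minimize

from qiskit_addon_aqc_tensor.objective import MaximizeProcessFidelity

objective = MaximizeProcessFidelity(target, ansatz, settings)
x0 = np.zeros(ansatz.num_parameters)  # initial parameter values
# loss_function returns (objective_value, gradient), so jac=True lets SciPy reuse the gradient.
result = minimize(objective.loss_function, x0, jac=True, method="L-BFGS-B")
optimal_parameters = result.x
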
36 changes: 35 additions & 1 deletion qiskit_addon_aqc_tensor/simulation/quimb/__init__.py
@@ -12,6 +12,7 @@

"""Quimb as a tensor network backend."""

# ruff: noqa: F811
from __future__ import annotations

import logging
@@ -27,7 +28,7 @@
from wrapt import register_post_import_hook

from ...ansatz_generation import AnsatzBlock
from ...objective import OneMinusFidelity
from ...objective import MaximizeProcessFidelity, OneMinusFidelity
from ..abstract import TensorNetworkSimulationSettings
from ..explicit_gradient import (
    compute_gradient_of_tensornetwork_overlap,
@@ -405,6 +406,39 @@ def oneminusfidelity_loss_fn(
    return 1 - fidelity


@dispatch
def tnoptimizer_objective_kwargs(objective: MaximizeProcessFidelity, /) -> dict[str, Any]:
    """Return keyword arguments for use with :func:`~quimb.tensor.TNOptimizer`.

    - ``loss_fn``
    - ``loss_kwargs``
    """
    import quimb.tensor as qtn

    target = objective.target
    if isinstance(target, qtn.Circuit):
        # If the target was provided as a quimb Circuit, use the state it prepares.
        target = target.psi
    return {
        "loss_fn": maximizeprocessfidelity_loss_fn,
        "loss_kwargs": {"target": target},
    }


def maximizeprocessfidelity_loss_fn(
    circ: quimb.tensor.Circuit,
    /,
    *,
    target: quimb.tensor.TensorNetworkGenVector,
    optimize="auto-hq",
):
    import autoray as ar

    # Contract the conjugated circuit unitary against the target, take the absolute
    # value, and normalize by 2**nsites; a perfect match gives zero loss.
    return (
        1
        - ar.do("abs", (circ.uni.H & target).contract(all, optimize=optimize)) / 2.0**target.nsites
    )
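
For reference (not part of the commit): assuming the ``target`` passed to this loss function is the target unitary in tensor-network operator form, with indices matching ``circ.uni``, the contraction above reduces to the normalized trace overlap ``|Tr(U_ansatz^dagger U_target)| / 2**n``, which equals 1 exactly when the two unitaries agree up to a global phase. A dense-matrix analogue for small qubit counts:

# Hypothetical dense-matrix check of the same quantity, assuming n is small enough
# that the full 2**n x 2**n unitaries fit in memory.
import numpy as np

def dense_process_loss(u_ansatz: np.ndarray, u_target: np.ndarray) -> float:
    n_qubits = int(np.log2(u_target.shape[0]))
    overlap = np.trace(u_ansatz.conj().T @ u_target)  # Tr(U_ansatz^dagger U_target)
    return 1.0 - abs(overlap) / 2.0**n_qubits  # 0 when the unitaries agree up to a global phase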


# Reminder: update the RST file in docs/apidocs when adding new interfaces.
__all__ = [
    "is_quimb_available",
