
Commit

Merge branch 'main' into ruff_test
AbeCoull authored Feb 18, 2025
2 parents 27da2f3 + ed6b849 commit 6e8adc6
Showing 14 changed files with 358 additions and 39 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/check-format.yml
@@ -16,9 +16,9 @@ jobs:
check-code-format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: '3.9'
- name: Install dependencies
4 changes: 2 additions & 2 deletions .github/workflows/dependent-tests.yml
@@ -21,9 +21,9 @@ jobs:
- amazon-braket-pennylane-plugin-python

steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
6 changes: 3 additions & 3 deletions .github/workflows/publish-to-pypi.yml
@@ -12,9 +12,9 @@ jobs:
name: Build and publish distribution to PyPi
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: '3.x'
- name: Install wheel
@@ -26,6 +26,6 @@ jobs:
- name: Build a binary wheel and a source tarball
run: python setup.py sdist bdist_wheel
- name: Publish distribution to PyPI
uses: pypa/gh-action-pypi-publish@15c56dba361d8335944d31a2ecd17d700fc7bcbc # release/v1
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1
with:
password: ${{ secrets.pypi_token }}
6 changes: 3 additions & 3 deletions .github/workflows/python-package.yml
@@ -24,9 +24,9 @@ jobs:
python-version: ["3.9", "3.10", "3.11"]

steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
@@ -36,7 +36,7 @@
run: |
tox -e unit-tests
- name: Upload coverage report to Codecov
uses: codecov/codecov-action@015f24e6818733317a2da2edd6290ab26238649a # v5.0.7
uses: codecov/codecov-action@13ce06bfc6bbe3ecf90edbbf1bc32fe5978ca1d3 # v5.3.1
with:
token: ${{ secrets.CODECOV_TOKEN }}
if: ${{ strategy.job-index }} == 0
4 changes: 2 additions & 2 deletions .github/workflows/twine-check.yml
@@ -14,9 +14,9 @@ jobs:
name: Check long description
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: '3.x'
- name: Install wheel
22 changes: 22 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,27 @@
# Changelog

## v1.90.0 (2025-02-14)

### Features

* added ankaa-3 to enum

## v1.89.1 (2025-02-10)

### Bug Fixes and Other Changes

* decorator job with no inner function

## v1.89.0 (2025-02-10)

### Deprecations and Removals

* Ankaa-2

### Features

* support CUDA-Q decorator kernel with hybrid job decorator

## v1.88.3 (2024-12-06)

### Bug Fixes and Other Changes
2 changes: 1 addition & 1 deletion src/braket/_sdk/_version.py
@@ -15,4 +15,4 @@
Version number (major.minor.patch[-label])
"""

__version__ = "1.88.4.dev0"
__version__ = "1.90.1.dev0"
3 changes: 2 additions & 1 deletion src/braket/devices/devices.py
@@ -50,7 +50,8 @@ class _Rigetti(str, Enum):
_AspenM1 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Aspen-M-1"
_AspenM2 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Aspen-M-2"
_AspenM3 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Aspen-M-3"
Ankaa2 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Ankaa-2"
_Ankaa2 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Ankaa-2"
Ankaa3 = "arn:aws:braket:us-west-1::device/qpu/rigetti/Ankaa-3"

class _Xanadu(str, Enum):
_Borealis = "arn:aws:braket:us-east-1::device/qpu/xanadu/Borealis"
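For orientation, a minimal usage sketch of the new enum member. It assumes the enum is exposed as Devices.Rigetti.Ankaa3 (mirroring the Devices.IQM.Garnet pattern used in the tests further down) and that the device is reachable through the usual AwsDevice constructor; this is an illustration, not part of the commit.

from braket.aws import AwsDevice
from braket.devices import Devices

# Ankaa-2 becomes private (_Ankaa2) in the diff above; Ankaa-3 is the new public member.
device = AwsDevice(Devices.Rigetti.Ankaa3)  # the enum value is the Ankaa-3 ARN string
print(device.name, device.status)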
131 changes: 129 additions & 2 deletions src/braket/jobs/hybrid_job.py
@@ -16,15 +16,17 @@
import functools
import importlib.util
import inspect
import os
import re
import shutil
import sys
import tempfile
import warnings
from collections.abc import Callable, Iterable
from contextlib import contextmanager
from logging import Logger, getLogger
from pathlib import Path
from types import ModuleType
from types import CodeType, ModuleType
from typing import Any

import cloudpickle
@@ -39,9 +41,14 @@
StoppingCondition,
)
from braket.jobs.image_uris import Framework, built_in_images, retrieve_image
from braket.jobs.local.local_job_container_setup import _get_env_input_data
from braket.jobs.quantum_job import QuantumJob
from braket.jobs.quantum_job_creation import _generate_default_job_name

DEFAULT_INPUT_CHANNEL = "input"
INNER_FUNCTION_SOURCE_INPUT_CHANNEL = "_braket_job_decorator_inner_function_source"
INNER_FUNCTION_SOURCE_INPUT_FOLDER = "_inner_function_source_folder"


def hybrid_job(
*,
@@ -74,6 +81,12 @@ def hybrid_job(
`local` set to `True`: `wait_until_complete`, `instance_config`, `distribution`,
`copy_checkpoints_from_job`, `stopping_condition`, `tags`, `logger`, and `quiet`.
Remarks:
Hybrid jobs created using this decorator have limited access to the source code of
functions defined outside of the decorated function. Functionality that depends on
source code analysis may not work properly when referencing functions defined outside
of the decorated function.
Args:
device (str | None): Device ARN of the QPU device that receives priority quantum
task queueing once the hybrid job begins running. Each QPU has a separate hybrid jobs
Expand Down Expand Up @@ -181,7 +194,13 @@ def job_wrapper(*args: Any, **kwargs: Any) -> Callable:
with (
_IncludeModules(include_modules),
tempfile.TemporaryDirectory(dir="", prefix="decorator_job_") as temp_dir,
persist_inner_function_source(entry_point) as inner_source_input,
):

job_input_data = _add_inner_function_source_to_input_data(
input_data, inner_source_input
)

temp_dir_path = Path(temp_dir)
entry_point_file_path = Path("entry_point.py")
with open(
Expand Down Expand Up @@ -209,7 +228,7 @@ def job_wrapper(*args: Any, **kwargs: Any) -> Callable:
}
optional_args = {
"image_uri": image_uri,
"input_data": input_data,
"input_data": job_input_data,
"instance_config": instance_config,
"distribution": distribution,
"checkpoint_config": checkpoint_config,
@@ -230,6 +249,114 @@ def job_wrapper(*args: Any, **kwargs: Any) -> Callable:
return _hybrid_job


@contextmanager
def persist_inner_function_source(entry_point: callable) -> None:
"""Persist the source code of the cloudpickled function by saving its source code as input data
and replace the source file path with the saved one.
Args:
entry_point (callable): The job decorated function.
"""
inner_source_mapping = _get_inner_function_source(entry_point.__code__)

if len(inner_source_mapping) == 0:
yield {}
else:
with tempfile.TemporaryDirectory(dir="", prefix="decorator_job_inner_source_") as temp_dir:
copy_dir = f"{temp_dir}/{INNER_FUNCTION_SOURCE_INPUT_FOLDER}"
os.mkdir(copy_dir)
path_mapping = _save_inner_source_to_file(inner_source_mapping, copy_dir)
entry_point.__code__ = _replace_inner_function_source_path(
entry_point.__code__, path_mapping
)
yield {INNER_FUNCTION_SOURCE_INPUT_CHANNEL: copy_dir}


def _replace_inner_function_source_path(
code_object: CodeType, path_mapping: dict[str, str]
) -> CodeType:
"""Recursively replace source code file path of the code object and of its child node's code
objects.
Args:
code_object (CodeType): Code object which source code file path to be replaced.
path_mapping (dict[str, str]): Mapping between local file path to path in a job
environment.
Returns:
CodeType: Code object with the source code file path replaced
"""
new_co_consts = []
for const in code_object.co_consts:
if inspect.iscode(const):
new_path = path_mapping[const.co_filename]
const = const.replace(co_filename=new_path)
const = _replace_inner_function_source_path(const, path_mapping)
new_co_consts.append(const)

code_object = code_object.replace(co_consts=tuple(new_co_consts))
return code_object
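
# Illustrative sketch (not part of this commit) of the mechanism used above:
# nested code objects live in co_consts, and CodeType.replace(co_filename=...)
# returns a copy pointing at the remapped source path. The container path below
# is an assumed example, not taken from the commit.
import inspect

def outer():
    def inner():
        return 42
    return inner

# Find the nested code object, as the loop above does.
inner_code = next(c for c in outer.__code__.co_consts if inspect.iscode(c))
remapped = inner_code.replace(
    co_filename="/opt/ml/input/data/_braket_job_decorator_inner_function_source/source_0.py"
)
print(inner_code.co_filename)  # local path where `inner` was defined
print(remapped.co_filename)    # remapped path used in the job environment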


def _save_inner_source_to_file(inner_source: dict[str, str], input_data_dir: str) -> dict[str, str]:
"""Saves the source code as input data for a job and returns a dictionary that maps the local
source file path of a function to the one to be used in the job environment.
Args:
inner_source (dict[str, str]): Mapping between source file name and source code.
input_data_dir (str): The path of the folder to be uploaded to job as input data.
Returns:
dict[str, str]: Mapping between local file path to path in a job environment.
"""
path_mapping = {}
for i, (local_path, source_code) in enumerate(inner_source.items()):
copy_file_name = f"source_{i}.py"
with open(f"{input_data_dir}/{copy_file_name}", "w") as f:
f.write(source_code)

path_mapping[local_path] = os.path.join(
_get_env_input_data()["AMZN_BRAKET_INPUT_DIR"],
INNER_FUNCTION_SOURCE_INPUT_CHANNEL,
copy_file_name,
)
return path_mapping
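
# Illustrative shape of the returned mapping (not part of this commit). It
# assumes AMZN_BRAKET_INPUT_DIR resolves to /opt/ml/input/data inside the job
# container and uses two hypothetical local source files.
example_path_mapping = {
    "/home/user/project/train.py":
        "/opt/ml/input/data/_braket_job_decorator_inner_function_source/source_0.py",
    "/home/user/project/utils.py":
        "/opt/ml/input/data/_braket_job_decorator_inner_function_source/source_1.py",
}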


def _get_inner_function_source(code_object: CodeType) -> dict[str, str]:
"""Returns a dictionary that maps the source file name to source code for all source files
used by the inner functions inside the job decorated function.
Args:
code_object (CodeType): Code object of an inner function.
Returns:
dict[str, str]: Mapping between source file name and source code.
"""
inner_source = {}
for const in code_object.co_consts:
if inspect.iscode(const):
source_file_path = inspect.getfile(code_object)
lines, _ = inspect.findsource(code_object)
inner_source.update({source_file_path: "".join(lines)})
inner_source.update(_get_inner_function_source(const))
return inner_source


def _add_inner_function_source_to_input_data(input_data: dict, inner_source_input: dict) -> dict:
"""Add the path of inner function source file as the input data of the job.
Args:
input_data (dict): Provided input data of the job.
inner_source_input (dict): A dict that points to the path of inner function source file.
Returns:
dict: input_data with inner function source file added.
"""
if input_data is None:
job_input_data = inner_source_input
elif isinstance(input_data, dict):
if INNER_FUNCTION_SOURCE_INPUT_CHANNEL in input_data:
raise ValueError(f"input channel cannot be {INNER_FUNCTION_SOURCE_INPUT_CHANNEL}")
job_input_data = {**input_data, **inner_source_input}
else:
job_input_data = {DEFAULT_INPUT_CHANNEL: input_data, **inner_source_input}
return job_input_data
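
# Worked illustration (hypothetical values, not part of this commit) of the
# three branches above: no input data, a dict of channels, and a single path/URI.
inner = {INNER_FUNCTION_SOURCE_INPUT_CHANNEL: "_inner_function_source_folder"}

_add_inner_function_source_to_input_data(None, inner)
# -> {"_braket_job_decorator_inner_function_source": "_inner_function_source_folder"}

_add_inner_function_source_to_input_data({"training": "s3://my-bucket/prefix"}, inner)
# -> {"training": "s3://my-bucket/prefix",
#     "_braket_job_decorator_inner_function_source": "_inner_function_source_folder"}

_add_inner_function_source_to_input_data("s3://my-bucket/prefix", inner)
# -> {"input": "s3://my-bucket/prefix",
#     "_braket_job_decorator_inner_function_source": "_inner_function_source_folder"}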


def _validate_python_version(image_uri: str | None, aws_session: AwsSession | None = None) -> None:
"""Validate python version at job definition time"""
aws_session = aws_session or AwsSession()
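Taken together, the hybrid_job.py changes ship the source of functions nested inside the decorated entry point to the job container under the _braket_job_decorator_inner_function_source input channel. A hedged usage sketch follows (device=None and all names are illustrative, not taken from this commit); per the new Remarks above, helpers that rely on source inspection should be defined inside the decorated function.

from braket.jobs import hybrid_job

@hybrid_job(device=None)  # no QPU priority; illustration only
def my_decorator_job():
    # Defined inside the decorated function, so its source file is persisted
    # as job input data and its co_filename is remapped for the job container.
    def inner_kernel(theta):
        return 2 * theta

    return inner_kernel(0.5)

job = my_decorator_job()   # creates and submits the hybrid job
print(job.result())        # blocks until the job completes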
6 changes: 3 additions & 3 deletions test/integ_tests/test_device_creation.py
@@ -37,7 +37,7 @@ def test_device_creation(arn, created_braket_devices):
assert device.properties


@pytest.mark.parametrize("arn", [(PULSE_ARN)])
@pytest.mark.parametrize("arn", [PULSE_ARN])
def test_device_pulse_properties(arn, aws_session, created_braket_devices):
device = created_braket_devices[arn]
assert device.ports
@@ -51,13 +51,13 @@ def test_device_across_regions(aws_session, created_braket_devices):
created_braket_devices[IQM_ARN]


@pytest.mark.parametrize("arn", [(RIGETTI_ARN), (IONQ_ARN), (IQM_ARN), (SIMULATOR_ARN)])
@pytest.mark.parametrize("arn", [(IONQ_ARN), (IQM_ARN), (SIMULATOR_ARN)])
def test_get_devices_arn(arn):
results = AwsDevice.get_devices(arns=[arn])
assert results[0].arn == arn


@pytest.mark.parametrize("arn", [(PULSE_ARN)])
@pytest.mark.parametrize("arn", [PULSE_ARN])
def test_device_gate_calibrations(arn, aws_session, created_braket_devices):
device = created_braket_devices[arn]
assert device.gate_calibrations
6 changes: 3 additions & 3 deletions test/integ_tests/test_measure.py
@@ -31,8 +31,8 @@
@pytest.mark.parametrize("arn", [(IONQ_ARN), (SIMULATOR_ARN)])
def test_unsupported_devices(arn):
device = AwsDevice(arn)
if device.status == "OFFLINE":
pytest.skip("Device offline")
if device.status != "ONLINE":
pytest.skip("Device not online")

circ = Circuit().h(0).cnot(0, 1).h(2).measure([0, 1])
error_string = re.escape(
@@ -53,7 +53,7 @@ def test_measure_on_local_sim(sim):
assert result.measured_qubits == [0, 1]


@pytest.mark.parametrize("arn", [(IQM_ARN)])
@pytest.mark.parametrize("arn", [IQM_ARN])
def test_measure_on_supported_devices(arn):
device = AwsDevice(arn)
if not device.is_available:
12 changes: 6 additions & 6 deletions test/integ_tests/test_pulse.py
@@ -200,8 +200,8 @@ def make_pulse(


def test_pulse_bell(arbitrary_waveform, device):
if device.status == "OFFLINE":
pytest.skip("Device offline")
if device.status != "ONLINE":
pytest.skip("Device not online")
(
a,
b,
@@ -250,8 +250,8 @@ def test_pulse_bell(arbitrary_waveform, device):


def test_pulse_sequence(arbitrary_waveform, device):
if device.status == "OFFLINE":
pytest.skip("Device offline")
if device.status != "ONLINE":
pytest.skip("Device not online")
(
a,
b,
@@ -305,8 +305,8 @@ def test_pulse_sequence(arbitrary_waveform, device):

@pytest.mark.skip(reason="needs to be updated to work correctly on Ankaa-2")
def test_gate_calibration_run(device, pulse_sequence):
if device.status == "OFFLINE":
pytest.skip("Device offline")
if device.status != "ONLINE":
pytest.skip("Device not online")
user_gate_calibrations = GateCalibrations({(Gate.Rx(math.pi / 2), QubitSet(0)): pulse_sequence})
num_shots = 50
bell_circuit = Circuit().rx(0, math.pi / 2).rx(1, math.pi / 2).iswap(0, 1).rx(1, -math.pi / 2)
2 changes: 1 addition & 1 deletion test/integ_tests/test_reservation_arn.py
@@ -64,7 +64,7 @@ def test_create_job_with_decorator_via_invalid_reservation_arn(reservation_arn):
with pytest.raises(ClientError, match="Reservation arn is invalid"):

@hybrid_job(
device=Devices.IonQ.Aria1,
device=Devices.IQM.Garnet,
reservation_arn=reservation_arn,
)
def hello_job():
