
Commit

Merge branch 'main' into math411-patch-2
AbeCoull authored Nov 2, 2023
2 parents 6443045 + ef4c92f commit 91b86c2
Showing 12 changed files with 90 additions and 43 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/publish-to-pypi.yml
@@ -21,6 +21,8 @@ jobs:
run: python -m pip install --user --upgrade wheel
- name: Install twine
run: python -m pip install --user --upgrade twine
- name: Install setuptools
run: python -m pip install --user --upgrade setuptools
- name: Build a binary wheel and a source tarball
run: python setup.py sdist bdist_wheel
- name: Publish distribution to PyPI
29 changes: 29 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,34 @@
# Changelog

## v1.60.2 (2023-11-01)

### Bug Fixes and Other Changes

* drop task count for batch task tests to 3

## v1.60.1 (2023-11-01)

### Bug Fixes and Other Changes

* set python container version explicitly
* set decorator job working directory inside of function
* s3 config support for decorator jobs

## v1.60.0 (2023-10-31)

### Features

* support dependency list for decorator hybrid jobs

### Bug Fixes and Other Changes

* Don't run pulse tests when QPU offline

### Documentation Changes

* Fix some nits in the decorator doc string
* update intended audience to include education and research

## v1.59.2 (2023-10-25)

### Bug Fixes and Other Changes
2 changes: 2 additions & 0 deletions setup.py
@@ -74,6 +74,8 @@
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
2 changes: 1 addition & 1 deletion src/braket/_sdk/_version.py
@@ -15,4 +15,4 @@
Version number (major.minor.patch[-label])
"""

__version__ = "1.59.3.dev0"
__version__ = "1.60.3.dev0"
12 changes: 6 additions & 6 deletions src/braket/jobs/_entry_point_template.py
@@ -5,16 +5,16 @@
from braket.jobs_data import PersistedJobDataFormat
# set working directory to results dir
os.chdir(get_results_dir())
# create symlinks to input data
links = link_input()
# load and run serialized entry point function
recovered = cloudpickle.loads({serialized})
def {function_name}():
try:
# set working directory to results dir
os.chdir(get_results_dir())
# create symlinks to input data
links = link_input()
result = recovered()
finally:
clean_links(links)
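For context, here is a minimal standalone sketch of how a format-string template like the one above could be rendered into the job's entry point source; the TEMPLATE literal, the repr-embedding, and the function name are illustrative assumptions, not code from this commit.

import cloudpickle

def my_algorithm():
    # hypothetical user function standing in for the decorated entry point
    return {"status": "SUCCESS"}

# hypothetical template mirroring the structure above: {serialized} and
# {function_name} are filled in before the source is written into the job
TEMPLATE = (
    "import cloudpickle\n"
    "recovered = cloudpickle.loads({serialized})\n"
    "def {function_name}():\n"
    "    return recovered()\n"
)

rendered = TEMPLATE.format(
    serialized=repr(cloudpickle.dumps(my_algorithm)),  # embeds a bytes literal
    function_name="my_algorithm",
)
print(rendered)  # valid Python source that defines my_algorithm()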
35 changes: 16 additions & 19 deletions src/braket/jobs/hybrid_job.py
@@ -68,8 +68,8 @@ def hybrid_job(
when the decorated function is called.
The job created will be a `LocalQuantumJob` when `local` is set to `True`, otherwise an
`AwsQuantumJob. The following parameters will be ignored when running a job with
`local` set to True: `wait_until_complete`, `instance_config`, `distribution`,
`AwsQuantumJob`. The following parameters will be ignored when running a job with
`local` set to `True`: `wait_until_complete`, `instance_config`, `distribution`,
`copy_checkpoints_from_job`, `stopping_condition`, `tags`, and `logger`.
Args:
@@ -83,27 +83,27 @@ def hybrid_job(
include_modules (str | ModuleType | Iterable[str | ModuleType]): Either a
single module or module name or a list of module or module names referring to local
modules to be included. Any references to members of these modules in the hybrid job
algorithm code will be serialized as part of the algorithm code. Default value `[]`
algorithm code will be serialized as part of the algorithm code. Default: `[]`
dependencies (str | Path | list[str]): Path (absolute or relative) to a requirements.txt
file, or alternatively a list of strings, with each string being a `requirement
specifier <https://pip.pypa.io/en/stable/reference/requirement-specifiers/
#requirement-specifiers>`_, to be used for the hybrid job.
local (bool): Whether to use local mode for the hybrid job. Default `False`
local (bool): Whether to use local mode for the hybrid job. Default: `False`
job_name (str): A string that specifies the name with which the job is created.
Allowed pattern for job name: `^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,50}$`. Defaults to
f'{decorated-function-name}-{timestamp}'.
image_uri (str): A str that specifies the ECR image to use for executing the job.
`retrieve_image()` function may be used for retrieving the ECR image URIs
for the containers supported by Braket. Default = `<Braket base image_uri>`.
for the containers supported by Braket. Default: `<Braket base image_uri>`.
input_data (str | dict | S3DataSourceConfig): Information about the training
data. Dictionary maps channel names to local paths or S3 URIs. Contents found
at any local paths will be uploaded to S3 at
f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local
f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}'. If a local
path, S3 URI, or S3DataSourceConfig is provided, it will be given a default
channel name "input".
Default: {}.
@@ -113,23 +113,23 @@ def hybrid_job(
local mode. Default: `False`.
instance_config (InstanceConfig): Configuration of the instance(s) for running the
classical code for the hybrid job. Defaults to
classical code for the hybrid job. Default:
`InstanceConfig(instanceType='ml.m5.large', instanceCount=1, volumeSizeInGB=30)`.
distribution (str): A str that specifies how the job should be distributed.
If set to "data_parallel", the hyperparameters for the job will be set to use data
parallelism features for PyTorch or TensorFlow. Default: None.
parallelism features for PyTorch or TensorFlow. Default: `None`.
copy_checkpoints_from_job (str): A str that specifies the job ARN whose
checkpoint you want to use in the current job. Specifying this value will copy
over the checkpoint data from `use_checkpoints_from_job`'s checkpoint_config
s3Uri to the current job's checkpoint_config s3Uri, making it available at
checkpoint_config.localPath during the job execution. Default: None
checkpoint_config.localPath during the job execution. Default: `None`
checkpoint_config (CheckpointConfig): Configuration that specifies the
location where checkpoint data is stored.
Default: CheckpointConfig(localPath='/opt/jobs/checkpoints',
s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints').
Default: `CheckpointConfig(localPath='/opt/jobs/checkpoints',
s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints')`.
role_arn (str): A str providing the IAM role ARN used to execute the
script. Default: IAM role returned by AwsSession's `get_default_jobs_role()`.
@@ -140,8 +140,8 @@ def hybrid_job(
output_data_config (OutputDataConfig): Specifies the location for the output of
the job.
Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
kmsKeyId=None).
Default: `OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
kmsKeyId=None)`.
aws_session (AwsSession): AwsSession for connecting to AWS Services.
Default: AwsSession()
@@ -150,7 +150,7 @@ def hybrid_job(
Default: {}.
logger (Logger): Logger object with which to write logs, such as task statuses
while waiting for task to be in a terminal state. Default is `getLogger(__name__)`
while waiting for task to be in a terminal state. Default: `getLogger(__name__)`
Returns:
Callable: the callable for creating a Hybrid Job.
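For orientation, a minimal usage sketch of the decorator documented above, exercising the requirement-specifier form of `dependencies`; the device ARN, the dependency list, and the function body are illustrative assumptions rather than values taken from this diff.

from braket.jobs import hybrid_job, save_job_result

@hybrid_job(
    device="arn:aws:braket:::device/quantum-simulator/amazon/sv1",  # assumed device ARN
    dependencies=["numpy>=1.23"],  # requirement specifiers instead of a requirements.txt path
)
def my_job(angle: float = 0.5):
    # the hybrid quantum-classical workload would go here
    save_job_result({"angle": angle})

job = my_job(0.25)  # creates an AwsQuantumJob, since local defaults to False
print(job.result())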
@@ -276,10 +276,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):

def _serialize_entry_point(entry_point: Callable, args: tuple, kwargs: dict) -> str:
"""Create an entry point from a function"""

def wrapped_entry_point() -> Any:
"""Partial function wrapping entry point with given parameters"""
return entry_point(*args, **kwargs)
wrapped_entry_point = functools.partial(entry_point, *args, **kwargs)

try:
serialized = cloudpickle.dumps(wrapped_entry_point)
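A self-contained sketch of the serialization round trip this hunk relies on, using a hypothetical entry point and arguments: `functools.partial` binds the call-time arguments, `cloudpickle` serializes the bound callable, and the rendered entry point restores and invokes it.

import functools

import cloudpickle

def entry_point(a, b, scale=1.0):
    # hypothetical function in place of the user's decorated entry point
    return (a + b) * scale

# bind the arguments the decorated function was called with
wrapped_entry_point = functools.partial(entry_point, 2, 3, scale=10.0)
payload = cloudpickle.dumps(wrapped_entry_point)

# later, inside the generated entry point module, the callable is restored
recovered = cloudpickle.loads(payload)
assert recovered() == 50.0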
@@ -369,7 +366,7 @@ def is_prefix(path: str) -> bool:
file_channels = set()

for channel, data in input_data.items():
if AwsSession.is_s3_uri(str(data)):
if AwsSession.is_s3_uri(str(data)) or isinstance(data, S3DataSourceConfig):
channel_arg = f'channel="{channel}"' if channel != "input" else ""
print(
"Input data channels mapped to an S3 source will not be available in "
2 changes: 1 addition & 1 deletion test/integ_tests/job_test_script.py
@@ -33,7 +33,7 @@ def start_here():

def failed_job_script():
print("Test job started!!!!!")
assert 0
open("fake_file")


def completed_job_script():
23 changes: 13 additions & 10 deletions test/integ_tests/test_create_quantum_job.py
@@ -22,17 +22,18 @@
import pytest
from job_test_module.job_test_submodule.job_test_submodule_file import submodule_helper

from braket.aws import AwsSession
from braket.aws.aws_quantum_job import AwsQuantumJob
from braket.devices import Devices
from braket.jobs import Framework, get_input_data_dir, hybrid_job, retrieve_image, save_job_result


@pytest.fixture
def decorator_python_version(aws_session):
def decorator_python_version():
aws_session = AwsSession()
image_uri = retrieve_image(Framework.BASE, aws_session.region)
tag = aws_session.get_full_image_tag(image_uri)
major_version, minor_version = re.search(r"-py(\d)(\d+)-", tag).groups()
return major_version, minor_version
return int(major_version), int(minor_version)


def test_failed_quantum_job(aws_session, capsys):
@@ -77,16 +78,17 @@ def test_failed_quantum_job(aws_session, capsys):
"braket_container.py",
"Running Code As Process",
"Test job started!!!!!",
"AssertionError",
"FileNotFoundError: [Errno 2] No such file or directory: 'fake_file'",
"Code Run Finished",
'"user_entry_point": "braket_container.py"',
]

for data in logs_to_validate:
assert data in log_data

assert job.metadata()["failureReason"].startswith(
"AlgorithmError: Job at job_test_script:start_here"
assert job.metadata()["failureReason"] == (
"AlgorithmError: FileNotFoundError: [Errno 2] "
"No such file or directory: 'fake_file', exit code: 1"
)


Expand Down Expand Up @@ -199,7 +201,7 @@ def test_completed_quantum_job(aws_session, capsys):


@pytest.mark.xfail(
(sys.version_info.major, sys.version_info.minor) != decorator_python_version,
(sys.version_info.major, sys.version_info.minor) != decorator_python_version(),
raises=RuntimeError,
reason="Python version mismatch",
)
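Two details here are easy to miss: `decorator_python_version` is now a plain function rather than a fixture because this condition is evaluated at import time, where pytest fixtures are unavailable, and it returns ints so the comparison against `sys.version_info` behaves as intended. A small illustration (values are hypothetical):

import sys

current = (sys.version_info.major, sys.version_info.minor)
# a tuple of strings never equals a tuple of ints, so without the int()
# conversion the xfail condition would be truthy even on a matching version
assert current != (str(current[0]), str(current[1]))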
@@ -217,7 +219,6 @@ def __str__(self):
input_data=str(Path("test", "integ_tests", "requirements")),
)
def decorator_job(a, b: int, c=0, d: float = 1.0, **extras):
save_job_result(job_test_script.job_helper())
with open(Path(get_input_data_dir()) / "requirements.txt", "r") as f:
assert f.readlines() == ["pytest\n"]
with open(Path("test", "integ_tests", "requirements.txt"), "r") as f:
@@ -243,6 +244,8 @@ def decorator_job(a, b: int, c=0, d: float = 1.0, **extras):
with open("test/output_file.txt", "w") as f:
f.write("hello")

return job_test_script.job_helper()

job = decorator_job(MyClass(), 2, d=5, extra_arg="extra_value")
assert job.result()["status"] == "SUCCESS"

@@ -263,7 +266,7 @@ def decorator_job(a, b: int, c=0, d: float = 1.0, **extras):


@pytest.mark.xfail(
(sys.version_info.major, sys.version_info.minor) != decorator_python_version,
(sys.version_info.major, sys.version_info.minor) != decorator_python_version(),
raises=RuntimeError,
reason="Python version mismatch",
)
@@ -282,7 +285,6 @@ def test_decorator_job_submodule():
},
)
def decorator_job_submodule():
save_job_result(submodule_helper())
with open(Path(get_input_data_dir("my_input")) / "requirements.txt", "r") as f:
assert f.readlines() == ["pytest\n"]
with open(Path("test", "integ_tests", "requirements.txt"), "r") as f:
@@ -303,6 +305,7 @@ def decorator_job_submodule():
) as f:
assert f.readlines() == ["pytest\n"]
assert dir(pytest)
save_job_result(submodule_helper())

job = decorator_job_submodule()
assert job.result()["status"] == "SUCCESS"
6 changes: 6 additions & 0 deletions test/integ_tests/test_pulse.py
@@ -210,6 +210,8 @@ def cz_pulse(


def test_pulse_bell(arbitrary_waveform, device):
if device.status == "OFFLINE":
pytest.skip("Device offline")
(
a,
b,
@@ -258,6 +260,8 @@ def test_pulse_bell(arbitrary_waveform, device):


def test_pulse_sequence(arbitrary_waveform, device):
if device.status == "OFFLINE":
pytest.skip("Device offline")
(
a,
b,
@@ -310,6 +314,8 @@ def test_pulse_sequence(arbitrary_waveform, device):


def test_gate_calibration_run(device, pulse_sequence):
if device.status == "OFFLINE":
pytest.skip("Device offline")
user_gate_calibrations = GateCalibrations({(Gate.Rx(math.pi / 2), QubitSet(0)): pulse_sequence})
num_shots = 50
bell_circuit = Circuit().rx(0, math.pi / 2).rx(1, math.pi / 2).cz(0, 1).rx(1, -math.pi / 2)
1 change: 1 addition & 0 deletions test/unit_tests/braket/aws/test_aws_device.py
@@ -1673,6 +1673,7 @@ def test_get_devices_simulators_only(mock_copy_session, aws_session):
assert [result.name for result in results] == ["SV1"]


@pytest.mark.filterwarnings("ignore:Test Code:")
@patch("braket.aws.aws_device.AwsSession.copy_session")
def test_get_devices_with_error_in_region(mock_copy_session, aws_session):
aws_session.search_devices.side_effect = [
10 changes: 5 additions & 5 deletions test/unit_tests/braket/devices/test_local_simulator.py
@@ -316,7 +316,7 @@ def test_batch_circuit():
theta = FreeParameter("theta")
task = Circuit().rx(angle=theta, target=0)
device = LocalSimulator(dummy)
num_tasks = 10
num_tasks = 3
circuits = [task for _ in range(num_tasks)]
inputs = [{"theta": i} for i in range(num_tasks)]
batch = device.run_batch(circuits, inputs=inputs, shots=10)
@@ -329,7 +329,7 @@ def test_batch_with_max_parallel():
dummy = DummyProgramSimulator()
task = Circuit().h(0).cnot(0, 1)
device = LocalSimulator(dummy)
num_tasks = 10
num_tasks = 3
circuits = [task for _ in range(num_tasks)]
batch = device.run_batch(circuits, shots=10, max_parallel=2)
assert len(batch.results()) == num_tasks
@@ -341,7 +341,7 @@ def test_batch_with_annealing_problems():
dummy = DummyAnnealingSimulator()
problem = Problem(ProblemType.ISING)
device = LocalSimulator(dummy)
num_tasks = 10
num_tasks = 3
problems = [problem for _ in range(num_tasks)]
batch = device.run_batch(problems, shots=10)
assert len(batch.results()) == num_tasks
@@ -353,7 +353,7 @@ def test_batch_circuit_without_inputs():
dummy = DummyProgramSimulator()
bell = Circuit().h(0).cnot(0, 1)
device = LocalSimulator(dummy)
num_tasks = 10
num_tasks = 3
circuits = [bell for _ in range(num_tasks)]
batch = device.run_batch(circuits, shots=10)
assert len(batch.results()) == num_tasks
@@ -385,7 +385,7 @@ def test_batch_circuit_with_task_and_input_mismatch():
dummy = DummyProgramSimulator()
bell = Circuit().h(0).cnot(0, 1)
device = LocalSimulator(dummy)
num_tasks = 10
num_tasks = 3
circuits = [bell for _ in range(num_tasks)]
inputs = [{} for _ in range(num_tasks - 1)]
with pytest.raises(ValueError):
(diff for the remaining changed file was not loaded on the page)

