diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml
index a15240835..10aef9c29 100644
--- a/.github/workflows/publish-to-pypi.yml
+++ b/.github/workflows/publish-to-pypi.yml
@@ -21,6 +21,8 @@ jobs:
         run: python -m pip install --user --upgrade wheel
       - name: Install twine
         run: python -m pip install --user --upgrade twine
+      - name: Install setuptools
+        run: python -m pip install --user --upgrade setuptools
       - name: Build a binary wheel and a source tarball
         run: python setup.py sdist bdist_wheel
       - name: Publish distribution to PyPI
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a96eff9e6..7a17eb6ba 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,34 @@
 # Changelog
 
+## v1.60.2 (2023-11-01)
+
+### Bug Fixes and Other Changes
+
+ * drop task count for batch task tests to 3
+
+## v1.60.1 (2023-11-01)
+
+### Bug Fixes and Other Changes
+
+ * set python container version explicitly
+ * set decorator job working directory inside of function
+ * s3 config support for decorator jobs
+
+## v1.60.0 (2023-10-31)
+
+### Features
+
+ * support dependency list for decorator hybrid jobs
+
+### Bug Fixes and Other Changes
+
+ * Don't run pulse tests when QPU offline
+
+### Documentation Changes
+
+ * Fix some nits in the decorator doc string
+ * update intended audience to include education and research
+
 ## v1.59.2 (2023-10-25)
 
 ### Bug Fixes and Other Changes
diff --git a/setup.py b/setup.py
index d3724c59c..f15777485 100644
--- a/setup.py
+++ b/setup.py
@@ -74,6 +74,8 @@
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Intended Audience :: Developers",
+        "Intended Audience :: Education",
+        "Intended Audience :: Science/Research",
         "Natural Language :: English",
         "License :: OSI Approved :: Apache Software License",
         "Programming Language :: Python",
diff --git a/src/braket/_sdk/_version.py b/src/braket/_sdk/_version.py
index 3ff3dc46f..20b348113 100644
--- a/src/braket/_sdk/_version.py
+++ b/src/braket/_sdk/_version.py
@@ -15,4 +15,4 @@
 Version number (major.minor.patch[-label])
 """
 
-__version__ = "1.59.3.dev0"
+__version__ = "1.60.3.dev0"
diff --git a/src/braket/jobs/_entry_point_template.py b/src/braket/jobs/_entry_point_template.py
index 285e4d85e..dceea98d1 100644
--- a/src/braket/jobs/_entry_point_template.py
+++ b/src/braket/jobs/_entry_point_template.py
@@ -5,16 +5,16 @@
 
 from braket.jobs_data import PersistedJobDataFormat
 
-# set working directory to results dir
-os.chdir(get_results_dir())
-
-# create symlinks to input data
-links = link_input()
-
 # load and run serialized entry point function
 recovered = cloudpickle.loads({serialized})
 
 
 def {function_name}():
     try:
+        # set working directory to results dir
+        os.chdir(get_results_dir())
+
+        # create symlinks to input data
+        links = link_input()
+
         result = recovered()
     finally:
         clean_links(links)
diff --git a/src/braket/jobs/hybrid_job.py b/src/braket/jobs/hybrid_job.py
index 0deca7ef3..da0b8436a 100644
--- a/src/braket/jobs/hybrid_job.py
+++ b/src/braket/jobs/hybrid_job.py
@@ -68,8 +68,8 @@ def hybrid_job(
     when the decorated function is called.
 
     The job created will be a `LocalQuantumJob` when `local` is set to `True`, otherwise an
-    `AwsQuantumJob. The following parameters will be ignored when running a job with
-    `local` set to True: `wait_until_complete`, `instance_config`, `distribution`,
+    `AwsQuantumJob`. The following parameters will be ignored when running a job with
+    `local` set to `True`: `wait_until_complete`, `instance_config`, `distribution`,
     `copy_checkpoints_from_job`, `stopping_condition`, `tags`, and `logger`.
 
     Args:
@@ -83,14 +83,14 @@ def hybrid_job(
        include_modules (str | ModuleType | Iterable[str | ModuleType]): Either a
            single module or module name or a list of module or module names referring to local
            modules to be included. Any references to members of these modules in the hybrid job
-            algorithm code will be serialized as part of the algorithm code. Default value `[]`
+            algorithm code will be serialized as part of the algorithm code. Default: `[]`
 
        dependencies (str | Path | list[str]): Path (absolute or relative) to a requirements.txt
            file, or alternatively a list of strings, with each string being a `requirement
            specifier <https://pip.pypa.io/en/stable/reference/requirement-specifiers/
            #requirement-specifiers>`_, to be used for the hybrid job.
 
-        local (bool): Whether to use local mode for the hybrid job. Default `False`
+        local (bool): Whether to use local mode for the hybrid job. Default: `False`
 
        job_name (str): A string that specifies the name with which the job is created.
            Allowed pattern for job name: `^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,50}$`. Defaults to
@@ -98,12 +98,12 @@ def hybrid_job(
        image_uri (str): A str that specifies the ECR image to use for executing the job.
            `retrieve_image()` function may be used for retrieving the ECR image URIs
-            for the containers supported by Braket. Default = `<Braket base image_uri>`.
+            for the containers supported by Braket. Default: `<Braket base image_uri>`.
 
        input_data (str | dict | S3DataSourceConfig): Information about the training data.
            Dictionary maps channel names to local paths or S3 URIs. Contents found at any local
            paths will be uploaded to S3 at
-            f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local
+            f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}'. If a local
            path, S3 URI, or S3DataSourceConfig is provided, it will be given a default
            channel name "input". Default: {}.
@@ -113,23 +113,23 @@ def hybrid_job(
            local mode. Default: `False`.
 
        instance_config (InstanceConfig): Configuration of the instance(s) for running the
-            classical code for the hybrid job. Defaults to
+            classical code for the hybrid job. Default:
            `InstanceConfig(instanceType='ml.m5.large', instanceCount=1, volumeSizeInGB=30)`.
 
        distribution (str): A str that specifies how the job should be distributed.
            If set to "data_parallel", the hyperparameters for the job will be set to use data
-            parallelism features for PyTorch or TensorFlow. Default: None.
+            parallelism features for PyTorch or TensorFlow. Default: `None`.
 
        copy_checkpoints_from_job (str): A str that specifies the job ARN whose checkpoint
            you want to use in the current job. Specifying this value will copy over the
            checkpoint data from `use_checkpoints_from_job`'s checkpoint_config s3Uri to
            the current job's checkpoint_config s3Uri, making it available at
-            checkpoint_config.localPath during the job execution. Default: None
+            checkpoint_config.localPath during the job execution. Default: `None`
 
        checkpoint_config (CheckpointConfig): Configuration that specifies the
            location where checkpoint data is stored.
-            Default: CheckpointConfig(localPath='/opt/jobs/checkpoints',
-            s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints').
+            Default: `CheckpointConfig(localPath='/opt/jobs/checkpoints',
+            s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints')`.
 
        role_arn (str): A str providing the IAM role ARN used to execute the script.
            Default: IAM role returned by AwsSession's `get_default_jobs_role()`.
@@ -140,8 +140,8 @@ def hybrid_job(
 
        output_data_config (OutputDataConfig): Specifies the location for the output of the
            job.
-            Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
-            kmsKeyId=None).
+            Default: `OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
+            kmsKeyId=None)`.
 
        aws_session (AwsSession): AwsSession for connecting to AWS Services.
            Default: AwsSession()
@@ -150,7 +150,7 @@ def hybrid_job(
            Default: {}.
 
        logger (Logger): Logger object with which to write logs, such as task statuses
-            while waiting for task to be in a terminal state. Default is `getLogger(__name__)`
+            while waiting for task to be in a terminal state. Default: `getLogger(__name__)`
 
    Returns:
        Callable: the callable for creating a Hybrid Job.
@@ -276,10 +276,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
 
 def _serialize_entry_point(entry_point: Callable, args: tuple, kwargs: dict) -> str:
     """Create an entry point from a function"""
-
-    def wrapped_entry_point() -> Any:
-        """Partial function wrapping entry point with given parameters"""
-        return entry_point(*args, **kwargs)
+    wrapped_entry_point = functools.partial(entry_point, *args, **kwargs)
 
     try:
         serialized = cloudpickle.dumps(wrapped_entry_point)
@@ -369,7 +366,7 @@ def is_prefix(path: str) -> bool:
     file_channels = set()
 
     for channel, data in input_data.items():
-        if AwsSession.is_s3_uri(str(data)):
+        if AwsSession.is_s3_uri(str(data)) or isinstance(data, S3DataSourceConfig):
             channel_arg = f'channel="{channel}"' if channel != "input" else ""
             print(
                 "Input data channels mapped to an S3 source will not be available in "
diff --git a/test/integ_tests/job_test_script.py b/test/integ_tests/job_test_script.py
index 8bc3b92cd..95b890d60 100644
--- a/test/integ_tests/job_test_script.py
+++ b/test/integ_tests/job_test_script.py
@@ -33,7 +33,7 @@ def start_here():
 
 def failed_job_script():
     print("Test job started!!!!!")
-    assert 0
+    open("fake_file")
 
 
 def completed_job_script():
diff --git a/test/integ_tests/test_create_quantum_job.py b/test/integ_tests/test_create_quantum_job.py
index 640cb1688..3b1b8ae95 100644
--- a/test/integ_tests/test_create_quantum_job.py
+++ b/test/integ_tests/test_create_quantum_job.py
@@ -22,17 +22,18 @@
 import pytest
 from job_test_module.job_test_submodule.job_test_submodule_file import submodule_helper
 
+from braket.aws import AwsSession
 from braket.aws.aws_quantum_job import AwsQuantumJob
 from braket.devices import Devices
 from braket.jobs import Framework, get_input_data_dir, hybrid_job, retrieve_image, save_job_result
 
 
-@pytest.fixture
-def decorator_python_version(aws_session):
+def decorator_python_version():
+    aws_session = AwsSession()
     image_uri = retrieve_image(Framework.BASE, aws_session.region)
     tag = aws_session.get_full_image_tag(image_uri)
     major_version, minor_version = re.search(r"-py(\d)(\d+)-", tag).groups()
-    return major_version, minor_version
+    return int(major_version), int(minor_version)
 
 
 def test_failed_quantum_job(aws_session, capsys):
@@ -77,7 +78,7 @@ def test_failed_quantum_job(aws_session, capsys):
         "braket_container.py",
         "Running Code As Process",
         "Test job started!!!!!",
-        "AssertionError",
+        "FileNotFoundError: [Errno 2] No such file or directory: 'fake_file'",
         "Code Run Finished",
         '"user_entry_point": "braket_container.py"',
     ]
@@ -85,8 +86,9 @@ def test_failed_quantum_job(aws_session, capsys):
     for data in logs_to_validate:
         assert data in log_data
 
-    assert job.metadata()["failureReason"].startswith(
job.metadata()["failureReason"].startswith( - "AlgorithmError: Job at job_test_script:start_here" + assert job.metadata()["failureReason"] == ( + "AlgorithmError: FileNotFoundError: [Errno 2] " + "No such file or directory: 'fake_file', exit code: 1" ) @@ -199,7 +201,7 @@ def test_completed_quantum_job(aws_session, capsys): @pytest.mark.xfail( - (sys.version_info.major, sys.version_info.minor) != decorator_python_version, + (sys.version_info.major, sys.version_info.minor) != decorator_python_version(), raises=RuntimeError, reason="Python version mismatch", ) @@ -217,7 +219,6 @@ def __str__(self): input_data=str(Path("test", "integ_tests", "requirements")), ) def decorator_job(a, b: int, c=0, d: float = 1.0, **extras): - save_job_result(job_test_script.job_helper()) with open(Path(get_input_data_dir()) / "requirements.txt", "r") as f: assert f.readlines() == ["pytest\n"] with open(Path("test", "integ_tests", "requirements.txt"), "r") as f: @@ -243,6 +244,8 @@ def decorator_job(a, b: int, c=0, d: float = 1.0, **extras): with open("test/output_file.txt", "w") as f: f.write("hello") + return job_test_script.job_helper() + job = decorator_job(MyClass(), 2, d=5, extra_arg="extra_value") assert job.result()["status"] == "SUCCESS" @@ -263,7 +266,7 @@ def decorator_job(a, b: int, c=0, d: float = 1.0, **extras): @pytest.mark.xfail( - (sys.version_info.major, sys.version_info.minor) != decorator_python_version, + (sys.version_info.major, sys.version_info.minor) != decorator_python_version(), raises=RuntimeError, reason="Python version mismatch", ) @@ -282,7 +285,6 @@ def test_decorator_job_submodule(): }, ) def decorator_job_submodule(): - save_job_result(submodule_helper()) with open(Path(get_input_data_dir("my_input")) / "requirements.txt", "r") as f: assert f.readlines() == ["pytest\n"] with open(Path("test", "integ_tests", "requirements.txt"), "r") as f: @@ -303,6 +305,7 @@ def decorator_job_submodule(): ) as f: assert f.readlines() == ["pytest\n"] assert dir(pytest) + save_job_result(submodule_helper()) job = decorator_job_submodule() assert job.result()["status"] == "SUCCESS" diff --git a/test/integ_tests/test_pulse.py b/test/integ_tests/test_pulse.py index c40a4556a..4eb3ffa93 100644 --- a/test/integ_tests/test_pulse.py +++ b/test/integ_tests/test_pulse.py @@ -210,6 +210,8 @@ def cz_pulse( def test_pulse_bell(arbitrary_waveform, device): + if device.status == "OFFLINE": + pytest.skip("Device offline") ( a, b, @@ -258,6 +260,8 @@ def test_pulse_bell(arbitrary_waveform, device): def test_pulse_sequence(arbitrary_waveform, device): + if device.status == "OFFLINE": + pytest.skip("Device offline") ( a, b, @@ -310,6 +314,8 @@ def test_pulse_sequence(arbitrary_waveform, device): def test_gate_calibration_run(device, pulse_sequence): + if device.status == "OFFLINE": + pytest.skip("Device offline") user_gate_calibrations = GateCalibrations({(Gate.Rx(math.pi / 2), QubitSet(0)): pulse_sequence}) num_shots = 50 bell_circuit = Circuit().rx(0, math.pi / 2).rx(1, math.pi / 2).cz(0, 1).rx(1, -math.pi / 2) diff --git a/test/unit_tests/braket/aws/test_aws_device.py b/test/unit_tests/braket/aws/test_aws_device.py index c1e034834..3770d38a2 100644 --- a/test/unit_tests/braket/aws/test_aws_device.py +++ b/test/unit_tests/braket/aws/test_aws_device.py @@ -1673,6 +1673,7 @@ def test_get_devices_simulators_only(mock_copy_session, aws_session): assert [result.name for result in results] == ["SV1"] +@pytest.mark.filterwarnings("ignore:Test Code:") @patch("braket.aws.aws_device.AwsSession.copy_session") def 
     aws_session.search_devices.side_effect = [
diff --git a/test/unit_tests/braket/devices/test_local_simulator.py b/test/unit_tests/braket/devices/test_local_simulator.py
index 08f2a19c5..8485dc5e5 100644
--- a/test/unit_tests/braket/devices/test_local_simulator.py
+++ b/test/unit_tests/braket/devices/test_local_simulator.py
@@ -316,7 +316,7 @@ def test_batch_circuit():
     theta = FreeParameter("theta")
     task = Circuit().rx(angle=theta, target=0)
     device = LocalSimulator(dummy)
-    num_tasks = 10
+    num_tasks = 3
     circuits = [task for _ in range(num_tasks)]
     inputs = [{"theta": i} for i in range(num_tasks)]
     batch = device.run_batch(circuits, inputs=inputs, shots=10)
@@ -329,7 +329,7 @@ def test_batch_with_max_parallel():
     dummy = DummyProgramSimulator()
     task = Circuit().h(0).cnot(0, 1)
     device = LocalSimulator(dummy)
-    num_tasks = 10
+    num_tasks = 3
     circuits = [task for _ in range(num_tasks)]
     batch = device.run_batch(circuits, shots=10, max_parallel=2)
     assert len(batch.results()) == num_tasks
@@ -341,7 +341,7 @@ def test_batch_with_annealing_problems():
     dummy = DummyAnnealingSimulator()
     problem = Problem(ProblemType.ISING)
     device = LocalSimulator(dummy)
-    num_tasks = 10
+    num_tasks = 3
     problems = [problem for _ in range(num_tasks)]
     batch = device.run_batch(problems, shots=10)
     assert len(batch.results()) == num_tasks
@@ -353,7 +353,7 @@ def test_batch_circuit_without_inputs():
     dummy = DummyProgramSimulator()
     bell = Circuit().h(0).cnot(0, 1)
     device = LocalSimulator(dummy)
-    num_tasks = 10
+    num_tasks = 3
     circuits = [bell for _ in range(num_tasks)]
     batch = device.run_batch(circuits, shots=10)
     assert len(batch.results()) == num_tasks
@@ -385,7 +385,7 @@ def test_batch_circuit_with_task_and_input_mismatch():
     dummy = DummyProgramSimulator()
     bell = Circuit().h(0).cnot(0, 1)
     device = LocalSimulator(dummy)
-    num_tasks = 10
+    num_tasks = 3
     circuits = [bell for _ in range(num_tasks)]
     inputs = [{} for _ in range(num_tasks - 1)]
     with pytest.raises(ValueError):
diff --git a/test/unit_tests/braket/jobs/test_hybrid_job.py b/test/unit_tests/braket/jobs/test_hybrid_job.py
index 5c804b0c0..b7b7485d7 100644
--- a/test/unit_tests/braket/jobs/test_hybrid_job.py
+++ b/test/unit_tests/braket/jobs/test_hybrid_job.py
@@ -15,7 +15,13 @@
 from braket.aws import AwsQuantumJob
 from braket.devices import Devices
 from braket.jobs import hybrid_job
-from braket.jobs.config import CheckpointConfig, InstanceConfig, OutputDataConfig, StoppingCondition
+from braket.jobs.config import (
+    CheckpointConfig,
+    InstanceConfig,
+    OutputDataConfig,
+    S3DataSourceConfig,
+    StoppingCondition,
+)
 from braket.jobs.hybrid_job import _sanitize, _serialize_entry_point
 from braket.jobs.local import LocalQuantumJob
@@ -110,6 +116,7 @@ def test_decorator_non_defaults(
         "my_dir": Path(tempdir, "temp_dir"),
         "my_file": Path(tempdir, "temp_file"),
         "my_s3_prefix": "s3://bucket/path/to/prefix",
+        "my_s3_config": S3DataSourceConfig(s3_data="s3://bucket/path/to/prefix"),
     }
 
     @hybrid_job(
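
Reviewer notes (illustrative sketches, not part of the patch):

The `_serialize_entry_point` change in `src/braket/jobs/hybrid_job.py` swaps a locally defined closure for `functools.partial` before handing the callable to cloudpickle. A minimal sketch of that pattern, assuming cloudpickle is installed; `entry_point` and its arguments here are invented for illustration:

```python
import functools

import cloudpickle


def entry_point(shots, device_arn="local"):
    # Hypothetical stand-in for a user's @hybrid_job-decorated function.
    return {"shots": shots, "device_arn": device_arn}


# Bind the call arguments up front, as the patched _serialize_entry_point does.
wrapped_entry_point = functools.partial(entry_point, 100, device_arn="sv1")

# cloudpickle serializes the partial by value (function code plus bound
# arguments), so the job container can recover and invoke it later.
serialized = cloudpickle.dumps(wrapped_entry_point)
recovered = cloudpickle.loads(serialized)
assert recovered() == {"shots": 100, "device_arn": "sv1"}
```

A `partial` keeps the serialized object a plain function-plus-arguments pair instead of a nested closure over the enclosing frame, which is a simpler and more predictable pickle payload.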
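
The integ test change turns `decorator_python_version` from a pytest fixture into a plain function, presumably because arguments to `@pytest.mark.xfail(...)` are evaluated at collection time, where fixtures are not available; the added `int(...)` casts matter because `re` match groups are strings. A sketch of the comparison under those assumptions (the image tag below is made up):

```python
import re
import sys


def container_python_version(tag: str) -> tuple[int, int]:
    # The "-pyXY-" convention mirrors the integ test helper.
    major, minor = re.search(r"-py(\d)(\d+)-", tag).groups()
    # re.Match.groups() returns strings: ("3", "10") != (3, 10) is always
    # True, which would make the xfail marker apply on every Python version.
    return int(major), int(minor)


running = (sys.version_info.major, sys.version_info.minor)
required = container_python_version("base:1.0-cpu-py310-ubuntu22.04")
print("xfail would apply:", running != required)
```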
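
The new `isinstance(data, S3DataSourceConfig)` branch catches pre-built S3 configs, whose `str()` form is not an `s3://` URI and therefore slipped past `is_s3_uri`. A hedged sketch of the channel classification, using only the calls the patch itself exercises (bucket names and the `input_data` dict are invented):

```python
from braket.aws import AwsSession
from braket.jobs.config import S3DataSourceConfig

input_data = {
    "input": "./local/dataset",  # local path: uploaded, then symlinked in the job
    "prefix": "s3://bucket/path/to/prefix",  # S3 URI: stays remote
    "config": S3DataSourceConfig(s3_data="s3://bucket/path/to/prefix"),
}

for channel, data in input_data.items():
    # str(S3DataSourceConfig(...)) is an object repr, not an "s3://..." URI,
    # so the explicit isinstance check is needed to treat it as remote too.
    remote = AwsSession.is_s3_uri(str(data)) or isinstance(data, S3DataSourceConfig)
    print(f"{channel}: {'remote (S3)' if remote else 'local upload'}")
```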