Skip to content

Commit

Permalink
Merge branch 'main' into tox_parallel
Browse files Browse the repository at this point in the history
  • Loading branch information
speller26 authored Mar 1, 2024
2 parents 67bada3 + 0370766 commit bda8887
Show file tree
Hide file tree
Showing 23 changed files with 355 additions and 141 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,4 @@ __pycache__/
/build
/venv
/dist
/model.tar.gz
26 changes: 26 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,31 @@
# Changelog

## v1.72.1 (2024-02-28)

### Bug Fixes and Other Changes

* escape slash in metrics prefix

## v1.72.0 (2024-02-27)

### Features

* FreeParameterExpression division

## v1.71.0 (2024-02-26)

### Features

* update log stream prefix for new jobs

## v1.70.3 (2024-02-21)

### Bug Fixes and Other Changes

* remove test with job creation with qpu
* use the caller's account id based on the session
* docs: add note about using env variables for endpoint

## v1.70.2 (2024-02-14)

### Bug Fixes and Other Changes
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
"cloudpickle==2.2.1",
"nest-asyncio",
"networkx",
"numpy",
"numpy<2",
"openpulse",
"openqasm3",
"sympy",
Expand Down
2 changes: 1 addition & 1 deletion src/braket/_sdk/_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,4 @@
Version number (major.minor.patch[-label])
"""

__version__ = "1.70.3.dev0"
__version__ = "1.72.2.dev0"
19 changes: 15 additions & 4 deletions src/braket/aws/aws_quantum_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,17 @@ def name(self) -> str:
"""str: The name of the quantum job."""
return self.metadata(use_cached_value=True).get("jobName")

@property
def _logs_prefix(self) -> str:
    """str: the prefix for the job logs."""
    # Older job ARNs ended with the job name and used a log prefix of
    # `job-name`; newer ARNs end with a UUID and use `job-name/UUID`.
    job_name = self.name
    if self.arn.endswith(job_name):
        return job_name
    unique_id = self.arn.split("/")[-1]
    return f"{job_name}/{unique_id}"

def state(self, use_cached_value: bool = False) -> str:
"""The state of the quantum hybrid job.
Expand Down Expand Up @@ -381,7 +392,6 @@ def logs(self, wait: bool = False, poll_interval_seconds: int = 5) -> None:
)

log_group = AwsQuantumJob.LOG_GROUP
stream_prefix = f"{self.name}/"
stream_names = [] # The list of log streams
positions = {} # The current position in each stream, map of stream name -> position
instance_count = self.metadata(use_cached_value=True)["instanceConfig"]["instanceCount"]
Expand All @@ -395,7 +405,7 @@ def logs(self, wait: bool = False, poll_interval_seconds: int = 5) -> None:
has_streams = logs.flush_log_streams(
self._aws_session,
log_group,
stream_prefix,
self._logs_prefix,
stream_names,
positions,
instance_count,
Expand Down Expand Up @@ -455,14 +465,15 @@ def metrics(
"""
fetcher = CwlInsightsMetricsFetcher(self._aws_session)
metadata = self.metadata(True)
job_name = metadata["jobName"]
job_start = None
job_end = None
if "startedAt" in metadata:
job_start = int(metadata["startedAt"].timestamp())
if self.state() in AwsQuantumJob.TERMINAL_STATES and "endedAt" in metadata:
job_end = int(math.ceil(metadata["endedAt"].timestamp()))
return fetcher.get_metrics_for_job(job_name, metric_type, statistic, job_start, job_end)
return fetcher.get_metrics_for_job(
self.name, metric_type, statistic, job_start, job_end, self._logs_prefix
)

def cancel(self) -> str:
"""Cancels the job.
Expand Down
5 changes: 5 additions & 0 deletions src/braket/aws/aws_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -792,6 +792,11 @@ def copy_session(
config = Config(max_pool_connections=max_connections) if max_connections else None
session_region = self.boto_session.region_name
new_region = region or session_region

# Note that this method does not copy a custom Braket endpoint URL, since those are
# region-specific. If you have an endpoint that you wish to be used by copied AwsSessions
# (e.g. for task batching), please use the `BRAKET_ENDPOINT` environment variable.

creds = self.boto_session.get_credentials()
default_bucket = self._default_bucket if self._custom_default_bucket else None
profile_name = self.boto_session.profile_name
Expand Down
9 changes: 6 additions & 3 deletions src/braket/jobs/metrics_data/cwl_insights_metrics_fetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,7 @@ def get_metrics_for_job(
statistic: MetricStatistic = MetricStatistic.MAX,
job_start_time: int | None = None,
job_end_time: int | None = None,
stream_prefix: str | None = None,
) -> dict[str, list[Union[str, float, int]]]:
"""Synchronously retrieves all the algorithm metrics logged by a given Hybrid Job.
Expand All @@ -150,6 +151,8 @@ def get_metrics_for_job(
Default: 3 hours before job_end_time.
job_end_time (int | None): If the hybrid job is complete, this should be the time at
which the hybrid job finished. Default: current time.
stream_prefix (str | None): The log stream prefix used to filter the job's
    log streams. If not provided, the job name is used. Default: None.
Returns:
dict[str, list[Union[str, float, int]]]: The metrics data, where the keys
Expand All @@ -166,11 +169,11 @@ def get_metrics_for_job(
query_end_time = job_end_time or int(time.time())
query_start_time = job_start_time or query_end_time - self.QUERY_DEFAULT_JOB_DURATION

# The stream prefix (or hybrid job name) must match exactly, to prevent jobs with
# similar names from being conflated; slashes are escaped for the regex filter below.
stream_prefix = (stream_prefix or job_name).replace("/", "\\/")

query = (
f"fields @timestamp, @message "
f"| filter @logStream like /^{job_name}\\// "
f"| filter @logStream like /^{stream_prefix}\\// "
f"| filter @message like /Metrics - /"
)

Expand Down
9 changes: 9 additions & 0 deletions src/braket/parametric/free_parameter_expression.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,15 @@ def __mul__(self, other: FreeParameterExpression):
def __rmul__(self, other: FreeParameterExpression):
return FreeParameterExpression(other * self.expression)

def __truediv__(self, other):
    """Return a new FreeParameterExpression representing self / other.

    Args:
        other: The divisor — another FreeParameterExpression or a value
            sympy can divide by directly (e.g. a numeric constant).
    """
    # Unwrap the underlying sympy expression when dividing by another
    # FreeParameterExpression; sympy handles plain values directly.
    divisor = other.expression if issubclass(type(other), FreeParameterExpression) else other
    return FreeParameterExpression(self.expression / divisor)

def __rtruediv__(self, other: FreeParameterExpression):
    """Return a new FreeParameterExpression representing other / self."""
    # Reflected division: reached when the left operand is not a
    # FreeParameterExpression (typically a plain numeric value).
    quotient = other / self.expression
    return FreeParameterExpression(quotient)

def __pow__(self, other: FreeParameterExpression, modulo: float = None):
if issubclass(type(other), FreeParameterExpression):
return FreeParameterExpression(self.expression**other.expression)
Expand Down
92 changes: 92 additions & 0 deletions test/integ_tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,62 @@
# language governing permissions and limitations under the License.

import os
import random
import string

import boto3
import pytest
from botocore.exceptions import ClientError

from braket.aws.aws_device import AwsDevice
from braket.aws.aws_quantum_job import AwsQuantumJob
from braket.aws.aws_session import AwsSession

SV1_ARN = "arn:aws:braket:::device/quantum-simulator/amazon/sv1"
DM1_ARN = "arn:aws:braket:::device/quantum-simulator/amazon/dm1"
TN1_ARN = "arn:aws:braket:::device/quantum-simulator/amazon/tn1"
SIMULATOR_ARNS = [SV1_ARN, DM1_ARN, TN1_ARN]

# Random 12-character names for the two shared integration-test jobs;
# lowercase alphanumerics keep the names valid job-name components.
_NAME_CHARS = string.ascii_lowercase + string.digits
job_complete_name = "".join(random.choice(_NAME_CHARS) for _ in range(12))
job_fail_name = "".join(random.choice(_NAME_CHARS) for _ in range(12))


def pytest_configure_node(node):
    """xdist hook: pass the shared job names from the controller to each worker."""
    worker_input = node.workerinput
    worker_input["JOB_COMPLETED_NAME"] = job_complete_name
    worker_input["JOB_FAILED_NAME"] = job_fail_name


def pytest_xdist_node_collection_finished(ids):
    """Use the pytest-xdist hook to check whether tests with jobs are to be run.

    If they are, the first reporting worker sets a flag that it created the
    jobs, to avoid concurrency limits. This is the first time in the pytest
    setup that the controller has all the tests to be run from the worker
    nodes.

    Args:
        ids: The collected test ids reported by the worker nodes.
    """
    run_jobs = any("job" in test for test in ids)
    if run_jobs and os.getenv("JOBS_STARTED") is None:
        # Only read AWS_PROFILE and build a session when job tests will
        # actually run, so sessions without job tests don't require AWS
        # credentials (the original read raised KeyError unconditionally).
        profile_name = os.environ["AWS_PROFILE"]
        aws_session = AwsSession(boto3.session.Session(profile_name=profile_name))
        # Create both shared jobs (expected-to-fail and expected-to-complete)
        # exactly once; the env flag prevents duplicate creation.
        for job_name, test_case in (
            (job_fail_name, "failed"),
            (job_complete_name, "completed"),
        ):
            AwsQuantumJob.create(
                SV1_ARN,  # reuse the module-level simulator ARN constant
                job_name=job_name,
                source_module="test/integ_tests/job_test_script.py",
                entry_point="job_test_script:start_here",
                aws_session=aws_session,
                wait_until_complete=False,
                hyperparameters={"test_case": test_case},
            )
        os.environ["JOBS_STARTED"] = "True"


@pytest.fixture(scope="session")
def boto_session():
Expand Down Expand Up @@ -82,3 +131,46 @@ def s3_prefix():
@pytest.fixture(scope="module")
def s3_destination_folder(s3_bucket, s3_prefix):
return AwsSession.S3DestinationFolder(s3_bucket, s3_prefix)


@pytest.fixture(scope="session")
def braket_simulators(aws_session):
    """Map each managed-simulator ARN to an AwsDevice on the shared session."""
    devices = {}
    for simulator_arn in SIMULATOR_ARNS:
        devices[simulator_arn] = AwsDevice(simulator_arn, aws_session)
    return devices


@pytest.fixture(scope="session")
def braket_devices():
    """All Braket devices, including retired and offline ones."""
    statuses = ["RETIRED", "ONLINE", "OFFLINE"]
    return AwsDevice.get_devices(statuses=statuses)


@pytest.fixture(scope="session", autouse=True)
def created_braket_devices(aws_session, braket_devices):
    """Index the available devices by ARN for quick lookup in tests."""
    devices_by_arn = {}
    for device in braket_devices:
        devices_by_arn[device.arn] = device
    return devices_by_arn


@pytest.fixture(scope="session")
def job_completed_name(request):
    """Name of the shared job expected to complete, distributed via xdist."""
    worker_input = request.config.workerinput
    return worker_input["JOB_COMPLETED_NAME"]


@pytest.fixture(scope="session")
def job_failed_name(request):
    """Name of the shared job expected to fail, distributed via xdist."""
    worker_input = request.config.workerinput
    return worker_input["JOB_FAILED_NAME"]


@pytest.fixture(scope="session", autouse=True)
def completed_quantum_job(aws_session, job_completed_name):
    """Handle to the shared job expected to complete, rebuilt from its ARN."""
    account = boto3.client("sts").get_caller_identity().get("Account")
    region = aws_session.region
    job_arn = f"arn:aws:braket:{region}:{account}:job/{job_completed_name}"
    return AwsQuantumJob(arn=job_arn)


@pytest.fixture(scope="session", autouse=True)
def failed_quantum_job(aws_session, job_failed_name):
    """Handle to the shared job expected to fail, rebuilt from its ARN."""
    account = boto3.client("sts").get_caller_identity().get("Account")
    region = aws_session.region
    job_arn = f"arn:aws:braket:{region}:{account}:job/{job_failed_name}"
    return AwsQuantumJob(arn=job_arn)
10 changes: 5 additions & 5 deletions test/integ_tests/gate_model_device_testing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,8 @@ def result_types_observable_not_in_instructions(device: Device, run_kwargs: Dict
.variance(observable=Observable.Y(), target=[3])
)
bell_qasm = bell.to_ir(ir_type=IRType.OPENQASM)
for task in (bell, bell_qasm):
result = device.run(task, **run_kwargs).result()
results = device.run_batch([bell, bell_qasm], **run_kwargs).results()
for result in results:
assert np.allclose(result.values[0], 0, **tol)
assert np.allclose(result.values[1], 1, **tol)

Expand All @@ -103,9 +103,9 @@ def result_types_zero_shots_bell_pair_testing(
circuit.amplitude(["01", "10", "00", "11"])
if include_state_vector:
circuit.state_vector()
tasks = (circuit, circuit.to_ir(ir_type=IRType.OPENQASM))
for task in tasks:
result = device.run(task, **run_kwargs).result()
tasks = [circuit, circuit.to_ir(ir_type=IRType.OPENQASM)]
results = device.run_batch(tasks, **run_kwargs).results()
for result in results:
assert len(result.result_types) == 3 if include_state_vector else 2
assert np.allclose(
result.get_value_by_result_type(
Expand Down
4 changes: 2 additions & 2 deletions test/integ_tests/job_test_script.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ def completed_job_script():
device = AwsDevice(get_job_device_arn())

bell = Circuit().h(0).cnot(0, 1)
for count in range(5):
task = device.run(bell, shots=100)
for count in range(3):
task = device.run(bell, shots=10)
print(task.result().measurement_counts)
save_job_result({"converged": True, "energy": -0.2})
save_job_checkpoint({"some_data": "abc"}, checkpoint_file_suffix="plain_data")
Expand Down
1 change: 1 addition & 0 deletions test/integ_tests/test_create_local_quantum_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ def test_completed_local_job(aws_session, capsys):

for data in logs_to_validate:
assert data in log_data

finally:
os.chdir(current_dir)

Expand Down
Loading

0 comments on commit bda8887

Please sign in to comment.