in src/braket/pennylane_plugin/braket_device.py [0:0]
def batch_execute(self, circuits, **run_kwargs):
    if not self._parallel:
        # Fall back to the default serial execution when parallel mode is disabled.
        return super().batch_execute(circuits)

    for circuit in circuits:
        self.check_validity(circuit.operations, circuit.observables)

    braket_circuits = [
        self._pl_to_braket_circuit(circuit, **run_kwargs) for circuit in circuits
    ]

    # A shot count of 0 corresponds to analytic (exact) simulation.
    batch_shots = 0 if self.analytic else self.shots

    task_batch = self._device.run_batch(
        braket_circuits,
        s3_destination_folder=self._s3_folder,
        shots=batch_shots,
        max_parallel=self._max_parallel,
        max_connections=self._max_connections,
        poll_timeout_seconds=self._poll_timeout_seconds,
        poll_interval_seconds=self._poll_interval_seconds,
        **self._run_kwargs,
    )

    # Call results() to retrieve the Braket results in parallel.
    try:
        braket_results_batch = task_batch.results(
            fail_unsuccessful=True, max_retries=self._max_retries
        )

    # Update the tracker before re-raising if some circuits did not complete.
    finally:
        if self.tracker.active:
            for task in task_batch.tasks:
                tracking_data = self._tracking_data(task)
                self.tracker.update(**tracking_data)
            total_executions = len(task_batch.tasks) - len(task_batch.unsuccessful)
            total_shots = total_executions * batch_shots
            self.tracker.update(batches=1, executions=total_executions, shots=total_shots)
            self.tracker.record()

    return [
        self._braket_to_pl_result(braket_result, circuit)
        for braket_result, circuit in zip(braket_results_batch, circuits)
    ]
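
This method is normally reached indirectly, when PennyLane hands the device a batch of tapes to execute. Below is a minimal usage sketch, not taken from the source: it assumes valid AWS credentials, the S3 bucket and prefix are placeholders, and the ARN shown is the managed SV1 state-vector simulator.

import pennylane as qml
from pennylane import numpy as np

# parallel=True enables the run_batch code path in batch_execute;
# the bucket and prefix below are placeholders, not real resources.
dev = qml.device(
    "braket.aws.qubit",
    device_arn="arn:aws:braket:::device/quantum-simulator/amazon/sv1",
    wires=2,
    s3_destination_folder=("example-bucket", "example-prefix"),
    parallel=True,
    max_parallel=10,
)

@qml.qnode(dev)
def circuit(theta):
    qml.RX(theta, wires=0)
    qml.CNOT(wires=[0, 1])
    return qml.expval(qml.PauliZ(1))

# Computing a gradient makes PennyLane submit several parameter-shifted
# tapes at once, which routes through batch_execute.
theta = np.array(0.3, requires_grad=True)
print(qml.grad(circuit)(theta))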