results() — excerpt from src/braket/aws/aws_quantum_task_batch.py


    def results(self, fail_unsuccessful=False, max_retries=MAX_RETRIES, use_cached_value=True):
        """Retrieves the result of every task in the batch.

        Polling for results happens in parallel; this method returns when all tasks
        have reached a terminal state. The result of this method is cached.

        Args:
            fail_unsuccessful (bool): If set to `True`, this method will fail
                if any task in the batch fails to return a result even after
                `max_retries` retries.
            max_retries (int): Maximum number of times to retry any failed tasks,
                i.e. any tasks in the `FAILED` or `CANCELLED` state or that didn't
                complete within the timeout. Default: 3.
            use_cached_value (bool): If `False`, will refetch the results from S3,
                even when results have already been cached. Default: `True`.

        Returns:
            list[GateModelQuantumTaskResult]: The results of all of the tasks
                in the batch, in the same order as the tasks. `FAILED`,
                `CANCELLED`, or timed out tasks will have a result of None.

        Raises:
            RuntimeError: If `fail_unsuccessful` is `True` and any task is still
                unsuccessful after the initial fetch plus `max_retries` retries.
        """
        # (Re)fetch when no results are cached yet or the caller opted out of
        # the cache; record the IDs of tasks that came back without a result
        # so the retry loop below knows what is still outstanding.
        if not self._results or not use_cached_value:
            self._results = AwsQuantumTaskBatch._retrieve_results(self._tasks, self._max_workers)
            self._unsuccessful = {
                task.id for task, result in zip(self._tasks, self._results) if not result
            }

        # Retry unsuccessful tasks at most max_retries times; each retry
        # updates self._results and self._unsuccessful in place, so stop
        # early once nothing is outstanding.
        for _ in range(max_retries):
            if not self._unsuccessful:
                break
            self.retry_unsuccessful_tasks()

        if fail_unsuccessful and self._unsuccessful:
            raise RuntimeError(
                f"{len(self._unsuccessful)} tasks failed to complete after {max_retries} retries"
            )
        return self._results