def _infer_device()

in src/evaluate/evaluator/base.py


    def _infer_device() -> int:
        """Helper function to check if GPU or CPU is available for inference."""
        # try infer with torch first
        try:
            import torch

            if torch.cuda.is_available():
                device = 0  # first GPU
            else:
                device = -1  # CPU
        except ImportError:
            # if torch is not available, fall back to TensorFlow
            try:
                import tensorflow as tf

                if len(tf.config.list_physical_devices("GPU")) > 0:
                    device = 0  # first GPU
                else:
                    device = -1  # CPU
            except ImportError:
                device = -1  # neither torch nor TensorFlow is installed; default to CPU

        if device == -1:
            logger.info("No GPU found. The default device for pipeline inference is set to CPU.")
        else:
            logger.info("GPU found. The default device for pipeline inference is set to GPU (CUDA:0).")

        return device
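
The returned integer follows the transformers pipeline convention: -1 selects the CPU and a non-negative index selects that GPU. Below is a minimal usage sketch, assuming transformers is installed and calling the helper directly for illustration; the "sentiment-analysis" task is an arbitrary example, not taken from the source.

    # Sketch: wiring the inferred device index into a transformers pipeline.
    from transformers import pipeline

    device = _infer_device()  # 0 for the first GPU, -1 for CPU
    pipe = pipeline("sentiment-analysis", device=device)
    print(pipe("The evaluator runs on the best available device."))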