llm_perf/benchmark_runners/cpu/update_llm_perf_cpu_onnxruntime.py [82:105]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            scenario=scenario_config,
            launcher=launcher_config,
            backend=backend_config,
        )

    def _get_weights_configs(self, subset: str) -> Dict[str, Dict[str, Any]]:
        """Map a benchmark subset name to the weight/precision configs to sweep.

        Each returned entry maps a config label (e.g. "float32") to a dict with
        keys "torch_dtype", "quant_scheme", and "quant_config".
        """
        if subset == "unquantized":
            # Plain dtypes only: no quantization, so quant_scheme is None and
            # quant_config is empty for every entry.
            return {
                "float32": {
                    "torch_dtype": "float32",
                    "quant_scheme": None,
                    "quant_config": {},
                },
                "float16": {
                    "torch_dtype": "float16",
                    "quant_scheme": None,
                    "quant_config": {},
                },
                "bfloat16": {
                    "torch_dtype": "bfloat16",
                    "quant_scheme": None,
                    "quant_config": {},
                },
            }
        # NOTE(review): other subset values appear to be handled below this
        # excerpt — truncated from view here.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



llm_perf/benchmark_runners/cuda/update_llm_perf_cuda_pytorch.py [95:118]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            scenario=scenario_config,
            launcher=launcher_config,
            backend=backend_config,
        )

    def _get_weights_configs(self, subset: str) -> Dict[str, Dict[str, Any]]:
        """Map a benchmark subset name to the weight/precision configs to sweep.

        Each returned entry maps a config label (e.g. "float32") to a dict with
        keys "torch_dtype", "quant_scheme", and "quant_config".
        """
        if subset == "unquantized":
            # Plain dtypes only: no quantization, so quant_scheme is None and
            # quant_config is empty for every entry.
            return {
                "float32": {
                    "torch_dtype": "float32",
                    "quant_scheme": None,
                    "quant_config": {},
                },
                "float16": {
                    "torch_dtype": "float16",
                    "quant_scheme": None,
                    "quant_config": {},
                },
                "bfloat16": {
                    "torch_dtype": "bfloat16",
                    "quant_scheme": None,
                    "quant_config": {},
                },
            }
        # NOTE(review): other subset values appear to be handled below this
        # excerpt — truncated from view here.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



