in trl/scripts/env.py

import os
import platform
from importlib.metadata import version

import torch
from accelerate.commands.config import default_config_file, load_config_from_file

# NOTE: the import locations of the availability helpers and get_git_commit_hash below
# are assumed; the exact modules may differ between TRL/transformers versions.
from transformers.utils import is_bitsandbytes_available, is_peft_available

from .. import __version__
from ..import_utils import (
    is_deepspeed_available,
    is_diffusers_available,
    is_liger_kernel_available,
    is_llm_blender_available,
    is_openai_available,
    is_vllm_available,
)
from .utils import get_git_commit_hash


def print_env():
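    # Collect the names of any available accelerator devices (CUDA, MPS, or XPU).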
    devices = None
    if torch.cuda.is_available():
        devices = [torch.cuda.get_device_name(i) for i in range(torch.cuda.device_count())]
    elif torch.backends.mps.is_available():
        devices = ["MPS"]
    elif torch.xpu.is_available():
        devices = [torch.xpu.get_device_name(i) for i in range(torch.xpu.device_count())]
    accelerate_config = accelerate_config_str = "not found"

    # Get the default from the config file.
    if os.path.isfile(default_config_file):
        accelerate_config = load_config_from_file(default_config_file).to_dict()

    accelerate_config_str = (
        "\n" + "\n".join([f" - {prop}: {val}" for prop, val in accelerate_config.items()])
        if isinstance(accelerate_config, dict)
        else accelerate_config
    )
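
    # Append the short git commit hash to the TRL version when one can be resolved.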
    commit_hash = get_git_commit_hash("trl")

    info = {
        "Platform": platform.platform(),
        "Python version": platform.python_version(),
        "TRL version": f"{__version__}+{commit_hash[:7]}" if commit_hash else __version__,
        "PyTorch version": version("torch"),
        "accelerator(s)": ", ".join(devices) if devices is not None else "cpu",
        "Transformers version": version("transformers"),
        "Accelerate version": version("accelerate"),
        "Accelerate config": accelerate_config_str,
        "Datasets version": version("datasets"),
        "HF Hub version": version("huggingface_hub"),
        "bitsandbytes version": version("bitsandbytes") if is_bitsandbytes_available() else "not installed",
        "DeepSpeed version": version("deepspeed") if is_deepspeed_available() else "not installed",
        "Diffusers version": version("diffusers") if is_diffusers_available() else "not installed",
        "Liger-Kernel version": version("liger_kernel") if is_liger_kernel_available() else "not installed",
        "LLM-Blender version": version("llm_blender") if is_llm_blender_available() else "not installed",
        "OpenAI version": version("openai") if is_openai_available() else "not installed",
        "PEFT version": version("peft") if is_peft_available() else "not installed",
        "vLLM version": version("vllm") if is_vllm_available() else "not installed",
    }
    info_str = "\n".join([f"- {prop}: {val}" for prop, val in info.items()])
    print(f"\nCopy-paste the following information when reporting an issue:\n\n{info_str}\n")  # noqa
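
A minimal usage sketch, assuming the module path shown in the header (trl.scripts.env) is importable as-is; TRL also surfaces this report through its command-line interface, typically as "trl env":

from trl.scripts.env import print_env

if __name__ == "__main__":
    # Prints a bullet list of platform, accelerator, and library-version
    # information that can be pasted directly into a bug report.
    print_env()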