graph G {
  compound="true"
  rankdir="TB"
  bgcolor="white"
  fontname="Tahoma"

  node [
    fixedsize="false"
    fontname="Tahoma"
    color="white"
    fillcolor="deepskyblue2"
    fontcolor="black"
    shape="box"
    style="filled"
    penwidth="1.0"
  ]

  edge [
    fontname="Arial"
    color="#00688b"
    fontcolor="black"
    fontsize="12"
    arrowsize="0.5"
    penwidth="1.0"
  ]

  "[backends/gaudi/server/text_generation_server/models/flash_vlm_causal_lm.py]" -- "[backends/gaudi/server/text_generation_server/models/flash_causal_lm.py]" [label=" 5 ", penwidth="5", color="#00688bA5"];
  "[backends/gaudi/server/text_generation_server/models/mllama_causal_lm.py]" -- "[backends/gaudi/server/text_generation_server/models/flash_causal_lm.py]" [label=" 4 ", penwidth="4", color="#00688b93"];
  "[backends/gaudi/server/text_generation_server/models/mllama_causal_lm.py]" -- "[backends/gaudi/server/text_generation_server/models/flash_vlm_causal_lm.py]" [label=" 4 ", penwidth="4", color="#00688b93"];
  "[backends/gaudi/server/text_generation_server/models/flash_causal_lm.py]" -- "[backends/gaudi/server/text_generation_server/models/__init__.py]" [label=" 3 ", penwidth="3", color="#00688b82"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen3_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_deepseek_v3_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mixtral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_dbrx_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen3_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_deepseek_v2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_phi_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_phi_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_dbrx_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gptj_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gptj_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gptj_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_deepseek_v2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_deepseek_v2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llava_next.py]" -- "[backends/gaudi/server/text_generation_server/models/flash_causal_lm.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_dbrx_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mixtral_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llava_next.py]" -- "[backends/gaudi/server/text_generation_server/models/flash_vlm_causal_lm.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mixtral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_phi_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen3_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_phi_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/mllama_causal_lm.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/idefics3.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_phi_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
  "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_qwen3_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"];
"[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_deepseek_v2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gptj_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_starcoder2_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_rw_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_mistral_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_gemma2_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_neox_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_llama4_modeling.py]" -- "[backends/gaudi/server/text_generation_server/models/custom_modeling/flash_cohere_modeling.py]" [label=" 2 ", penwidth="2", color="#00688b70"]; }