# load_mistral
# Source: cookbook-efforts/kto-preference/preference_gen.py

def load_mistral(task: Task) -> LLM:
    """Return an inference-endpoints LLM backed by Mistral-7B-Instruct-v0.2.

    The endpoint is authenticated with the module-level ``HF_TOKEN``, uses the
    llama2 prompt format expected by Mistral instruct checkpoints, and caps
    generation at 512 new tokens.

    Args:
        task: The distilabel task the LLM will run.

    Returns:
        A configured ``InferenceEndpointsLLM`` instance.
    """
    model_id = "mistralai/Mistral-7B-Instruct-v0.2"
    return InferenceEndpointsLLM(
        model_id,
        token=HF_TOKEN,
        task=task,
        prompt_format="llama2",
        max_new_tokens=512,
    )