def _prepare_chat_template()

in src/lighteval/tasks/prompt_manager.py


    def _prepare_chat_template(self, doc: Doc, tokenize: bool = True) -> str | list[dict[str, str]]:
        """Prepare the prompt in chat template format.

        Returns the fully templated prompt string when `tokenize` is True
        (local models), or the raw list of chat messages when it is False
        (API backends that accept messages directly).
        """
        messages = []
        instruction_used = False  # Tracks whether the instruction has already been prepended to the first few-shot example

        # Add system prompt if available
        if self.system_prompt is not None:
            messages.append({"role": "system", "content": self.system_prompt})

        # Add few-shot examples
        for ix, fewshot_sample in enumerate(doc.fewshot_samples):
            query = self._extract_query(fewshot_sample.query, fewshot_sample.instruction)
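            # Prepend the task instruction only once, to the first few-shot example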
            if ix == 0 and doc.instruction is not None:
                instruction_used = True
                query = doc.instruction + query

            messages.append({"role": "user", "content": query})
            messages.append({"role": "assistant", "content": fewshot_sample.get_golds()[0]})

        # Add main query
        main_query = self._extract_query(doc.query, doc.instruction)

        if doc.instruction is not None and not instruction_used:
            # No few-shot example carried the instruction, so prepend it to the main query
            main_query = doc.instruction + main_query

        messages.append({"role": "user", "content": main_query})

        if tokenize:  # Local models: render the conversation into a single prompt string
            assert self.tokenizer is not None, "Tokenizer must be set for chat template formatting."

            # Note: tokenize=False below returns the templated string rather than
            # token ids; the method's own `tokenize` flag only selects between
            # string output and raw messages.
            return self.tokenizer.apply_chat_template(
                messages,
                tokenize=False,
                add_generation_prompt=True,
            )

        else:  # APIs: return the raw messages list for the provider to template
            return messages
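
For illustration, here is a minimal sketch of what the two return modes produce. The model name and messages below are hypothetical; any Hugging Face tokenizer that ships a chat template behaves the same way.

    from transformers import AutoTokenizer

    # Hypothetical zero-shot conversation, mirroring the messages list built
    # above: a system prompt followed by the main user query.
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Question: What is 2 + 2?\nAnswer:"},
    ]

    # tokenize=True path (local models): the method renders the conversation
    # into a single prompt string via the tokenizer's chat template.
    tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")
    prompt = tokenizer.apply_chat_template(
        messages,
        tokenize=False,              # return a string, not token ids
        add_generation_prompt=True,  # append the assistant header so the model completes it
    )
    print(prompt)

    # tokenize=False path (APIs): the messages list above is returned as-is,
    # and the API provider applies its own template server-side.

Passing add_generation_prompt=True matters for the local path: it appends the template's assistant header so the model continues the conversation instead of predicting another user turn, which is why the method hard-codes it when producing generation prompts.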