src/co_op_translator/core/llm/text_translator.py [45:53]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # Send the prompt to the chat-completions endpoint of the configured
        # client (OpenAI-compatible `chat.completions.create` API; model name
        # resolved per call via self.get_model_name()).
        response = self.client.chat.completions.create(
            model=self.get_model_name(),
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
            max_tokens=2000,  # NOTE(review): hard-coded output cap — may truncate long translations; confirm
        )
        # Take the first completion choice and strip any Markdown code fences
        # the model may have wrapped around the output.
        # assumes remove_code_backticks is a module-level helper — defined elsewhere in this file
        translated_text = remove_code_backticks(response.choices[0].message.content)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



src/co_op_translator/core/llm/text_translator.py [71:79]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # NOTE(review): byte-identical duplicate of the call at text_translator.py
        # lines 45:53 — candidate for extraction into a shared private helper
        # (e.g. _chat_complete(prompt)) so the request parameters stay in sync.
        # Send the prompt to the chat-completions endpoint of the configured
        # client (OpenAI-compatible `chat.completions.create` API).
        response = self.client.chat.completions.create(
            model=self.get_model_name(),
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
            max_tokens=2000,  # NOTE(review): hard-coded output cap — may truncate long translations; confirm
        )
        # Strip any Markdown code fences from the first choice's content.
        translated_text = remove_code_backticks(response.choices[0].message.content)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



