in src/llm_judge.py [0:0]
def __init__(self):
    # Prefer the GPU when one is available; other methods can check self.device.type.
    self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # device_map="auto" (via accelerate) places the Phi-3 weights on the GPU when
    # possible and falls back to CPU otherwise, so a single load path covers both cases.
    self.model = AutoModelForCausalLM.from_pretrained(
        "microsoft/Phi-3-mini-4k-instruct",
        device_map="auto",
        torch_dtype="auto",
        trust_remote_code=True,
    )
    self.tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct")
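The excerpt only shows the constructor, not how the judge actually queries the model. A minimal usage sketch follows, assuming a hypothetical `score` method on the same class; the method name, prompt wording, and generation settings are assumptions, and only `self.model` / `self.tokenizer` come from the code above. It uses the standard transformers chat-template and generate APIs.

def score(self, question: str, answer: str) -> str:
    # Hypothetical judging prompt; the real prompt used by src/llm_judge.py is not shown here.
    messages = [
        {"role": "system", "content": "You are a strict grader. Reply with a score from 1 to 5."},
        {"role": "user", "content": f"Question: {question}\nAnswer: {answer}"},
    ]
    # Build a Phi-3 chat prompt and move the token ids to wherever the weights were placed.
    input_ids = self.tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(self.model.device)
    output = self.model.generate(
        input_ids,
        max_new_tokens=64,
        do_sample=False,
        pad_token_id=self.tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens, not the echoed prompt.
    return self.tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)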