in src/infer_location.py [0:0]
def preprocess(self, text, max_length=64):
    """Tokenize *text* into fixed-length model inputs.

    Args:
        text: Raw input accepted by ``self.tokenizer`` (a string, or
            whatever batch form the tokenizer supports — tokenizer type
            is not visible here; presumably a HuggingFace tokenizer).
        max_length: Sequence length to pad/truncate to. Defaults to 64,
            preserving the previously hard-coded behavior.

    Returns:
        Tuple ``(input_ids, attention_mask)`` as numpy arrays — shape is
        presumably ``(batch, max_length)``; confirm against the tokenizer.
    """
    # return_tensors="pt" yields tensors exposing .numpy(); padding to a
    # fixed max_length gives downstream code a constant-shape input.
    encoded = self.tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        padding="max_length",
        max_length=max_length,
    )
    return encoded["input_ids"].numpy(), encoded["attention_mask"].numpy()