in phi/run_eval.py [0:0]
def __init__(self, stop_tokens: torch.LongTensor, batch_size: int = 1) -> None:
    """Initialize the multiple-token batch stopping criteria.

    Args:
        stop_tokens: Tensor of stop-token sequences to match against generated tokens.
        batch_size: Number of sequences in the generation batch.
    """

    self.stop_tokens = stop_tokens

    # Length of the longest stop sequence (last dimension of the stop-token tensor).
    self.max_stop_tokens = stop_tokens.shape[-1]

    # Per-sequence slot, initialized to 0, for recording where a stop sequence is detected.
    self.stop_tokens_idx = torch.zeros(batch_size, dtype=torch.long, device=stop_tokens.device)
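
For context, here is a minimal sketch of a companion __call__ method that a stopping criteria like this could implement, assuming the enclosing class subclasses transformers.StoppingCriteria and that torch is already imported in run_eval.py; the actual implementation in the repository may differ.

def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
    # Compare the last `max_stop_tokens` generated tokens of each sequence against
    # every stop sequence; broadcasting yields a match matrix of shape
    # (batch, num_stop_sequences).
    tail = input_ids[:, -self.max_stop_tokens:].unsqueeze(1)
    matches = torch.all(torch.eq(tail, self.stop_tokens), dim=2)

    # Record, once per sequence, the generated length at which a stop sequence appeared;
    # entries already set are left untouched.
    hit = torch.any(matches, dim=1)
    unset = self.stop_tokens_idx == 0
    self.stop_tokens_idx[hit & unset] = input_ids.shape[-1]

    # Halt generation only when every sequence in the batch has produced a stop sequence.
    return bool(torch.all(self.stop_tokens_idx))

In use, an instance would typically be wrapped in a transformers.StoppingCriteriaList and passed to model.generate(stopping_criteria=...), with the recorded stop_tokens_idx positions then available for trimming the stop tokens from each decoded sequence.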