in tt_embeddings_ops.py [0:0]
def get_params(self) -> List[torch.Tensor]:
    # Collect the learnable TT cores; copy into a new list so the stored
    # container is not mutated by the optional append below.
    params = list(self.tt_cores)
    if self.use_cache:
        # Include the cache weight when the embedding cache is enabled.
        params.append(self.cache_weight)
    return params
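
A minimal usage sketch of the pattern above: the list returned by get_params is typically handed straight to an optimizer. The _FakeTTModule class below is a hypothetical stand-in (its attribute shapes and constructor are assumptions, not the real TTEmbeddingBag), kept only so the snippet is self-contained and runnable.

import torch
from typing import List


class _FakeTTModule(torch.nn.Module):
    # Hypothetical stand-in for a TT-embedding module exposing get_params().
    def __init__(self, use_cache: bool = True) -> None:
        super().__init__()
        # Assumed shapes, for illustration only.
        self.tt_cores = torch.nn.ParameterList(
            torch.nn.Parameter(torch.randn(4, 8)) for _ in range(3)
        )
        self.use_cache = use_cache
        self.cache_weight = torch.nn.Parameter(torch.zeros(16, 8))

    def get_params(self) -> List[torch.Tensor]:
        # Same logic as the snippet above: cores plus optional cache weight.
        params = list(self.tt_cores)
        if self.use_cache:
            params.append(self.cache_weight)
        return params


module = _FakeTTModule()
# Hand the TT cores (and the cache weight, if enabled) to an optimizer.
optimizer = torch.optim.SGD(module.get_params(), lr=0.01)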