# update_cache() — from tt_embeddings_ops.py

    def update_cache(self, indices: torch.Tensor):
        """Record an access to `indices` in the embedding cache state.

        Forwards the looked-up indices to the native extension so it can
        update per-row access frequencies (`self.cache_freq`) keyed through
        `self.hashtbl`. Does nothing when caching is disabled.

        Args:
            indices: tensor of embedding row indices that were accessed.
        """
        if not self.use_cache:
            return
        # pyre-fixme[16]
        tt_embeddings.update_cache_state(indices, self.hashtbl, self.cache_freq)