src/mlm/scorers.py [205:215]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        batch_size = 0

        # Each element of `batch` is one device's shard of the split batch;
        # accumulate the total sentence count while dispatching each shard
        # to its corresponding context.
        for ctx_idx, (sent_idxs, token_ids, valid_length) in enumerate(batch):

            ctx = self._ctxs[ctx_idx]
            batch_size += sent_idxs.shape[0]
            # Move the inputs onto this shard's device before the forward pass
            token_ids = token_ids.as_in_context(ctx)
            valid_length = valid_length.as_in_context(ctx)

            # out is ((batch size, max_seq_len, vocab size), new states)
            out = self._model(token_ids)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



src/mlm/scorers.py [251:261]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        batch_size = 0

        # Each element of `batch` is one device's shard of the split batch;
        # accumulate the total sentence count while dispatching each shard
        # to its corresponding context.
        for ctx_idx, (sent_idxs, token_ids, valid_length) in enumerate(batch):

            ctx = self._ctxs[ctx_idx]
            batch_size += sent_idxs.shape[0]
            # Move the inputs onto this shard's device before the forward pass
            token_ids = token_ids.as_in_context(ctx)
            valid_length = valid_length.as_in_context(ctx)

            # out is ((batch size, max_seq_len, vocab size), new states)
            out = self._model(token_ids)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
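
Both excerpts follow the same per-context dispatch pattern: the batch arrives pre-split into one shard per device, each shard's arrays are copied onto that device, and the model forward pass runs there. Below is a minimal, self-contained sketch of that pattern, assuming MXNet NDArray inputs; `score_shards` and `fake_model` are hypothetical names introduced here for illustration and are not part of this repository.

import mxnet as mx

def score_shards(model, ctxs, batch):
    """Run each per-context shard on its device and collect log-probabilities."""
    batch_size = 0
    outputs = []
    for ctx_idx, (sent_idxs, token_ids, valid_length) in enumerate(batch):
        ctx = ctxs[ctx_idx]                          # device for this shard
        batch_size += sent_idxs.shape[0]             # sentences in this shard
        token_ids = token_ids.as_in_context(ctx)     # copy inputs onto the device
        valid_length = valid_length.as_in_context(ctx)
        out = model(token_ids)                       # (shard size, max_seq_len, vocab size)
        outputs.append(mx.nd.log_softmax(out, axis=-1))
    return batch_size, outputs

# Toy check on CPU with a stand-in "model" that returns random logits.
ctxs = [mx.cpu()]
fake_model = lambda ids: mx.nd.random.uniform(shape=(ids.shape[0], ids.shape[1], 100))
shard = (mx.nd.arange(2), mx.nd.ones((2, 8)), mx.nd.array([8, 5]))
total, log_probs = score_shards(fake_model, ctxs, [shard])
print(total, log_probs[0].shape)  # 2 (2, 8, 100)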



