custom/gpt2/run_gpt2.py [112:118]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    model_output = model(inp)
    target = longer_sample[:, 1:]  # next-token targets: the sample shifted by one
    logits = model_output[0]
    lprobs = F.log_softmax(logits, dim=-1)
    assert lprobs.size(0) == 1, 'We work on flat sequences'
    loss = F.nll_loss(lprobs[0], target[0], reduction='sum')  # summed NLL of the sample
    # Negated nll_loss over raw logits gathers the logit of each true token.
    true_token_logits = -F.nll_loss(logits[0], target[0], reduction='none')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
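
The only non-obvious line in this block is the last one: with reduction='none', F.nll_loss simply returns the negated input gathered at each target index, so applying it to raw logits and negating the result recovers the logit the model assigned to each true token. A minimal standalone check of that identity (tensor shapes and names here are illustrative, not from the repo):

    import torch
    import torch.nn.functional as F

    logits = torch.randn(5, 10)          # (seq_len, vocab_size)
    target = torch.randint(0, 10, (5,))  # true token ids, shape (seq_len,)

    via_nll = -F.nll_loss(logits, target, reduction='none')
    via_gather = logits.gather(1, target.unsqueeze(1)).squeeze(1)

    assert torch.allclose(via_nll, via_gather)  # identical per-token logits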



custom/gpt2/run_gpt2.py [247:253]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            model_output = model(inp)
            target = longer_sample[:, 1:]  # next-token targets: the sample shifted by one
            logits = model_output[0]
            lprobs = F.log_softmax(logits, dim=-1)
            assert lprobs.size(0) == 1, 'We work on flat sequences'
            loss = F.nll_loss(lprobs[0], target[0], reduction='sum')  # summed NLL of the sample
            # Negated nll_loss over raw logits gathers the logit of each true token.
            true_token_logits = -F.nll_loss(logits[0], target[0], reduction='none')
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
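
The two excerpts are identical in the source file (the second is just nested two indentation levels deeper), so the block is a natural candidate for extraction into a shared helper. A minimal sketch, assuming a hypothetical name score_sequence and the HuggingFace-style convention that model(inp)[0] holds the logits; nothing below exists in the repo as written:

    import torch.nn.functional as F

    def score_sequence(model, inp, target):
        """Hypothetical helper consolidating the duplicated block above.

        Returns the summed NLL of the target tokens and the raw logit the
        model assigned to each true token.
        """
        logits = model(inp)[0]
        lprobs = F.log_softmax(logits, dim=-1)
        assert lprobs.size(0) == 1, 'We work on flat sequences'
        loss = F.nll_loss(lprobs[0], target[0], reduction='sum')
        # Negated nll_loss on raw logits == logit of each true token.
        true_token_logits = -F.nll_loss(logits[0], target[0], reduction='none')
        return loss, true_token_logits

Each call site would then reduce to loss, true_token_logits = score_sequence(model, inp, longer_sample[:, 1:]).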



