train_curve.py [264:285]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        log_interval = 10  # print a progress line every `log_interval` batches

        if batch_idx % log_interval == 0:
            num_samples = batch_idx * len(data)  # samples processed so far this epoch
            dataset_size = len(train_loader.dataset)  # total number of training samples
            percent_complete = 100.0 * batch_idx / len(train_loader)

            # Fraction of correct predictions in the current batch.
            predicted_labels = output.argmax(dim=1)
            batch_accuracy = (
                predicted_labels == target
            ).float().sum() / target.numel()

            print(
                f"Train Epoch: {epoch} [{num_samples}/{dataset_size} ({percent_complete:.0f}%)]\t"
                f"Loss: {loss.item():.6f} Correct: {batch_accuracy.item():.4f}"
            )

    # Restore the default forward pass: stop returning intermediate features.
    model.apply(lambda m: setattr(m, "return_feats", False))

    # Average the accumulated loss over the number of batches.
    avg_loss = avg_loss / len(train_loader)

    return avg_loss, regime_params
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
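
The per-batch accuracy above divides a sum of boolean matches by `target.numel()`; `.float().mean()` computes the same fraction more directly. A minimal self-contained check, using hypothetical tensors standing in for `predicted_labels` and `target`:

```python
import torch

# Hypothetical batch: 4 predictions against 4 labels.
predicted_labels = torch.tensor([2, 0, 1, 1])
target = torch.tensor([2, 0, 3, 1])

# sum / numel and mean() agree: both give the fraction of correct predictions.
frac_sum = (predicted_labels == target).float().sum() / target.numel()
frac_mean = (predicted_labels == target).float().mean()
assert torch.isclose(frac_sum, frac_mean)
print(frac_mean.item())  # 0.75
```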



train_indep.py [247:267]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        log_interval = 10  # print a progress line every `log_interval` batches

        if batch_idx % log_interval == 0:
            num_samples = batch_idx * len(data)  # samples processed so far this epoch
            dataset_size = len(train_loader.dataset)  # total number of training samples
            percent_complete = 100.0 * batch_idx / len(train_loader)

            # Fraction of correct predictions in the current batch.
            predicted_labels = output.argmax(dim=1)
            batch_accuracy = (
                predicted_labels == target
            ).float().sum() / target.numel()

            print(
                f"Train Epoch: {epoch} [{num_samples}/{dataset_size} ({percent_complete:.0f}%)]\t"
                f"Loss: {loss.item():.6f} Correct: {batch_accuracy.item():.4f}"
            )

    # Restore the default forward pass: stop returning intermediate features.
    model.apply(lambda m: setattr(m, "return_feats", False))

    # Average the accumulated loss over the number of batches.
    avg_loss = avg_loss / len(train_loader)

    return avg_loss, regime_params
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
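
The two excerpts are near-verbatim duplicates; the logging logic could live in one shared helper that both `train_curve.py` and `train_indep.py` import. A minimal sketch, assuming a hypothetical shared module `utils.py` and the names used in the excerpts above (standard PyTorch `DataLoader`, per-batch `data`, `target`, `output`, `loss`):

```python
# utils.py (hypothetical shared module; names mirror the excerpts above)


def log_train_progress(epoch, batch_idx, data, target, output, loss,
                       train_loader, log_interval=10):
    """Print loss and per-batch accuracy every `log_interval` batches."""
    if batch_idx % log_interval != 0:
        return

    num_samples = batch_idx * len(data)  # samples processed so far this epoch
    dataset_size = len(train_loader.dataset)  # total number of training samples
    percent_complete = 100.0 * batch_idx / len(train_loader)

    # Fraction of correct predictions in the current batch.
    predicted_labels = output.argmax(dim=1)
    batch_accuracy = (predicted_labels == target).float().mean()

    print(
        f"Train Epoch: {epoch} [{num_samples}/{dataset_size} ({percent_complete:.0f}%)]\t"
        f"Loss: {loss.item():.6f} Correct: {batch_accuracy.item():.4f}"
    )
```

Each training loop would then call `log_train_progress(epoch, batch_idx, data, target, output, loss, train_loader)` in place of the duplicated block, so a future change to the log format lands in one file instead of two.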



