def get_callbakcs()

in utils/train.py


import os
import shutil

from skorch.callbacks import EarlyStopping, LoadInitState, ProgressBar, TensorBoard
from torch.utils.tensorboard import SummaryWriter

# `get_checkpoint` and `FixRandomSeed` are assumed to be defined or
# imported elsewhere in utils/train.py.


def get_callbakcs(callbacks, chckpnt_dirnames, is_continue_train,
                  is_continue_best, checkpoint_epochs, datasets,
                  monitor_best, patience, seed, is_progressbar,
                  tensorboard_dir, is_train):
    """Append training callbacks to `callbacks` and return the list."""
    # checkpoint the latest state in every checkpoint directory
    for chckpnt_dirname in chckpnt_dirnames:
        chckpt_last = get_checkpoint(chckpnt_dirname, monitor="last")
        callbacks.append(chckpt_last)

    # load state from a previous checkpoint to continue training
    if is_continue_train:
        if is_continue_best:
            chckpt_cont = get_checkpoint(
                chckpnt_dirnames[0], monitor=monitor_best)
        else:
            # chckpt_last points at the last entry of chckpnt_dirnames,
            # so training resumes from that directory's latest state
            chckpt_cont = chckpt_last
        load_state = LoadInitState(chckpt_cont)
        callbacks.append(load_state)

    # snapshot checkpoints at the given epochs
    if checkpoint_epochs is not None:
        for chckpnt_dirname in chckpnt_dirnames:
            callbacks.append(
                get_checkpoint(chckpnt_dirname, monitor=checkpoint_epochs)
            )

    # N.B.: the best checkpoint is the one skorch marks with a "+" in the training log
    if "valid" in datasets:
        for chckpnt_dirname in chckpnt_dirnames:
            chckpt_best = get_checkpoint(chckpnt_dirname, monitor=monitor_best)
            callbacks.append(chckpt_best)

        if patience is not None:
            # stop training once the validation metric has not improved
            # for `patience` consecutive epochs
            callbacks.append(EarlyStopping(patience=patience))

    if seed is not None:
        # fix RNG seeds so that runs are reproducible
        callbacks.append(FixRandomSeed(seed))

    if is_progressbar:
        callbacks.append(ProgressBar())

    if tensorboard_dir is not None and is_train:
        # wipe stale event files from a previous run before logging anew
        if os.path.isdir(tensorboard_dir):
            shutil.rmtree(tensorboard_dir)
        writer = SummaryWriter(tensorboard_dir)
        callbacks.append(TensorBoard(writer))

    return callbacks
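
The helpers `get_checkpoint` and `FixRandomSeed` are not shown in this section. Below is a minimal sketch of what they might look like, assuming skorch's `Checkpoint` and `Callback` APIs; the real definitions in utils/train.py may differ.

import random

import numpy as np
import torch
from skorch.callbacks import Callback, Checkpoint


def get_checkpoint(dirname, monitor):
    """Build a skorch Checkpoint (hypothetical sketch).

    `monitor` may be "last" (save every epoch), a history key such as
    "valid_loss_best" (save on improvement), or a collection of epoch
    numbers to snapshot.
    """
    if monitor == "last":
        # monitor=None makes skorch save the checkpoint at every epoch
        return Checkpoint(dirname=dirname, monitor=None)
    if isinstance(monitor, (list, tuple, set)):
        epochs = set(monitor)
        # a callable monitor receives the net and returns True when it should save
        return Checkpoint(dirname=dirname,
                          monitor=lambda net: len(net.history) in epochs)
    return Checkpoint(dirname=dirname, monitor=monitor)


class FixRandomSeed(Callback):
    """Seed Python, NumPy, and PyTorch RNGs for reproducible training."""

    def __init__(self, seed=42):
        self.seed = seed

    def initialize(self):
        random.seed(self.seed)
        np.random.seed(self.seed)
        torch.manual_seed(self.seed)
        return self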
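
A sketch of a call site, assuming the returned list is handed to a skorch net; every concrete value here (directories, monitor key, toy model) is illustrative only.

import torch.nn as nn
from skorch import NeuralNetClassifier


class TinyModule(nn.Module):
    """Toy model, purely illustrative."""

    def __init__(self):
        super().__init__()
        self.lin = nn.Linear(20, 2)

    def forward(self, x):
        return self.lin(x)


callbacks = get_callbakcs(
    callbacks=[],
    chckpnt_dirnames=["results/run1"],        # where checkpoints are written
    is_continue_train=False,                  # do not resume a previous run
    is_continue_best=False,
    checkpoint_epochs=None,                   # no fixed-epoch snapshots
    datasets={"train": None, "valid": None},  # only the keys matter here
    monitor_best="valid_loss_best",           # skorch's best-valid-loss key
    patience=10,                              # early stopping patience
    seed=123,
    is_progressbar=True,
    tensorboard_dir="runs/run1",
    is_train=True,
)

net = NeuralNetClassifier(TinyModule, callbacks=callbacks)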