scripts/setfit/run_fewshot.py [129:151]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            # Build the SetFit model: the "pytorch" classifier variant uses a
            # differentiable head sized to the number of distinct training labels;
            # any other value falls back to the library's default head.
            if args.classifier == "pytorch":
                model = SetFitModel.from_pretrained(
                    args.model,
                    use_differentiable_head=True,
                    # out_features = number of unique labels in this split
                    head_params={"out_features": len(set(train_data["label"]))},
                )
            else:
                model = SetFitModel.from_pretrained(args.model)
            # Cap the tokenizer/body sequence length from the CLI arguments.
            model.model_body.max_seq_length = args.max_seq_length
            if args.add_normalization_layer:
                # NOTE(review): overwrites submodule "2" of the sentence-transformer
                # body with a Normalize layer — assumes slot "2" exists (typically the
                # pooling output); confirm against the loaded model architecture.
                model.model_body._modules["2"] = models.Normalize()

            # Train on current split
            trainer = SetFitTrainer(
                model=model,
                train_dataset=train_data,
                eval_dataset=test_data,
                metric=metric,
                loss_class=loss_class,
                batch_size=args.batch_size,
                num_epochs=args.num_epochs,
                num_iterations=args.num_iterations,
            )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



scripts/setfit/run_zeroshot.py [126:148]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    # Build the SetFit model: the "pytorch" classifier variant uses a
    # differentiable head whose output size matches the number of distinct
    # labels in train_data; any other value uses the library default head.
    if args.classifier == "pytorch":
        model = SetFitModel.from_pretrained(
            args.model,
            use_differentiable_head=True,
            # out_features = number of unique labels in the (synthetic) train set
            head_params={"out_features": len(set(train_data["label"]))},
        )
    else:
        model = SetFitModel.from_pretrained(args.model)
    # Cap the tokenizer/body sequence length from the CLI arguments.
    model.model_body.max_seq_length = args.max_seq_length
    if args.add_normalization_layer:
        # NOTE(review): overwrites submodule "2" of the sentence-transformer body
        # with a Normalize layer — assumes slot "2" exists in the loaded model;
        # duplicated verbatim from run_fewshot.py, a shared helper may be warranted.
        model.model_body._modules["2"] = models.Normalize()

    # Train on current split
    trainer = SetFitTrainer(
        model=model,
        train_dataset=train_data,
        eval_dataset=test_data,
        metric=metric,
        loss_class=loss_class,
        batch_size=args.batch_size,
        num_epochs=args.num_epochs,
        num_iterations=args.num_iterations,
    )
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



