in main.py [0:0]
def gen_Classifiers_name(args, transformer, datasets):
    """Yield (uninstantiated classifier, classifier name) pairs for all configurations."""
    gamma = args.clfs.gamma_force_generalization
    # Ratio of train- to test-set size, used below to rescale gamma in the loss.
    data_weight = len(datasets["train"]) / len(datasets["test"])

    # Sweep over every combination of hidden size, number of hidden layers, and pruning level.
    for n_hid in OmegaConf.to_container(args.clfs.nhiddens, resolve=True):
        for n_lay in OmegaConf.to_container(args.clfs.nlayers, resolve=True):
            for k_pru in OmegaConf.to_container(args.clfs.kprune, resolve=True):
                clf_name = (
                    f"clf_nhid_{n_hid}/clf_nlay_{n_lay}/clf_kpru_{k_pru}/gamma_{gamma}/"
                )

                # Uninstantiated MLP head for the current hyperparameter combination.
                Classifier = partial(
                    MLP, hidden_size=n_hid, n_hidden_layers=n_lay, k_prune=k_pru
                )

                kwargs = {}
                if not args.clfs.is_reinitialize:
                    # Reuse the transformer's Q_zy head instead of training the
                    # classifier from a fresh initialization.
                    kwargs["previous_mlp"] = transformer.module_.Q_zy

                # Wrap the transformer + classifier head in a NeuralNetClassifier
                # so it exposes an sklearn-style fit/predict API.
                Classifier = partial(
                    NeuralNetClassifier,
                    module=partial(
                        MCTrnsfClassifier,
                        transformer=transformer.module_,
                        Classifier=Classifier,
                        **OmegaConf.to_container(args.clfs.kwargs, resolve=True),
                        **kwargs,
                    ),
                    # Don't use any regularization if you only care about training
                    # performance (e.g. for Rademacher complexity estimates).
                    optimizer=get_optim(args),
                    criterion=partial(
                        CrossEntropyLossGeneralize,
                        # Rescale gamma by the train/test size ratio.
                        gamma=gamma * data_weight,
                        map_target_position=datasets["train"].map_target_position,
                    ),
                )

                yield Classifier, clf_name
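
# A minimal sketch of how the generator above might be consumed. The `results`
# dict and the training/evaluation calls are illustrative assumptions, not part
# of this file:
#
#     results = {}
#     for Classifier, clf_name in gen_Classifiers_name(args, transformer, datasets):
#         clf = Classifier()                  # instantiate the wrapped classifier
#         clf.fit(datasets["train"], y=None)  # targets are taken from the dataset itself
#         results[clf_name] = clf.history[-1]  # e.g. keep the last epoch's metrics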