aiops/ContraLSP/rare/rare_feature.py [77:145]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if "gatemask" in explainers:
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        mask = GateMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            lambda_1=0.1,   # 0.01 is suitable for our lambda
            lambda_2=0.1,
            optim="adam",
            lr=0.1,
        )
        explainer = GateMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,
            sigma=0.5,
        )
        gatemask_saliency = _attr.clone().detach()
        with open(os.path.join(save_dir, f"gatemask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(gatemask_saliency, file)
        print("==============gatemask==============")
        print_results(gatemask_saliency, true_saliency)

    if "nnmask" in explainers:
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        mask = ExtremalMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            optim="adam",
            lr=0.1,
        )
        explainer = ExtremalMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



aiops/ContraLSP/rare/rare_time_diffgroup.py [85:153]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    if "gatemask" in explainers:
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        mask = GateMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            lambda_1=0.1,   # 0.1 is suitable for our lambda
            lambda_2=0.1,
            optim="adam",
            lr=0.1,
        )
        explainer = GateMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,
            sigma=0.5,
        )
        gatemask_saliency = _attr.clone().detach()
        with open(os.path.join(save_dir, f"gatemask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(gatemask_saliency, file)
        print("==============gatemask==============")
        print_results(gatemask_saliency, true_saliency)

    if "nnmask" in explainers:
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        mask = ExtremalMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            optim="adam",
            lr=0.1,
        )
        explainer = ExtremalMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



