aiops/ContraLSP/rare/rare_feature_diffgroup.py [78:158]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        return output

    # GateMask (ContraLSP) explainer: train a gated mask network to produce a
    # saliency attribution for X, pickle it, and print evaluation metrics
    # against the ground-truth saliency.
    if "gatemask" in explainers:
        # Fresh CPU-only Lightning trainer for this explainer. The random
        # 128-bit logger version presumably keeps TensorBoard run directories
        # from colliding across repeated runs — TODO confirm.
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        # Mask network: bidirectional GRU sized to the feature count
        # F = X.shape[-1], followed by an MLP projecting 2*F (both GRU
        # directions concatenated) back down to F.
        mask = GateMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            lambda_1=0.1,   # 0.1 for our lambda is suitable
            lambda_2=0.1,
            optim="adam",
            lr=0.1,
        )
        explainer = GateMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,  # presumably all examples in one batch — verify
            sigma=0.5,
        )
        # NOTE(review): saliency is kept as a detached torch tensor here,
        # while the nnmask branch converts to numpy before pickling — confirm
        # the asymmetry is intentional (print_results may accept both).
        gatemask_saliency = _attr.clone().detach()
        # Persist per-fold result; `cv` is the cross-validation fold index —
        # TODO confirm against caller.
        with open(os.path.join(save_dir, f"gatemask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(gatemask_saliency, file)
        print("==============gatemask==============")
        print_results(gatemask_saliency, true_saliency)

    # ExtremalMask ("nnmask") explainer: same GRU+MLP mask architecture as
    # the gatemask branch but without the lambda/sigma regularization
    # arguments; result is pickled and scored against the true saliency.
    if "nnmask" in explainers:
        # Fresh CPU-only Lightning trainer; random 128-bit logger version
        # presumably avoids TensorBoard run-directory collisions — TODO confirm.
        trainer = Trainer(
            max_epochs=200,
            accelerator='cpu',
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        # Mask network: bidirectional GRU over F = X.shape[-1] features,
        # then an MLP projecting the concatenated 2*F hidden state to F.
        mask = ExtremalMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            optim="adam",
            lr=0.1,
        )
        explainer = ExtremalMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,  # presumably all examples in one batch — verify
        )
        # NOTE(review): converted to numpy here, unlike the gatemask branch
        # which pickles a torch tensor — confirm intentional.
        nnmask_saliency = _attr.clone().detach().numpy()
        with open(os.path.join(save_dir, f"nnmask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(nnmask_saliency, file)
        print("==============nnmask==============")
        print_results(nnmask_saliency, true_saliency)

    if "dynamask" in explainers:
        pert = GaussianBlur(device=device)  # We use a Gaussian Blur perturbation operator
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



aiops/ContraLSP/rare/rare_time_diffgroup.py [83:163]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        return output

    # GateMask (ContraLSP) explainer: train a gated mask network to produce a
    # saliency attribution for X, pickle it, and print evaluation metrics.
    # NOTE(review): byte-identical duplicate of the gatemask branch in
    # rare_feature_diffgroup.py — consider extracting a shared helper.
    if "gatemask" in explainers:
        # Fresh CPU-only Lightning trainer; the random 128-bit logger version
        # presumably avoids TensorBoard run-directory collisions — TODO confirm.
        trainer = Trainer(
            max_epochs=200,
            accelerator="cpu",
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        # Mask network: bidirectional GRU sized to the feature count
        # F = X.shape[-1], then an MLP projecting 2*F down to F.
        mask = GateMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            lambda_1=0.1,   # 0.1 for our lambda is suitable
            lambda_2=0.1,
            optim="adam",
            lr=0.1,
        )
        explainer = GateMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,  # presumably all examples in one batch — verify
            sigma=0.5,
        )
        # NOTE(review): pickled as a detached torch tensor, while the nnmask
        # branch converts to numpy — confirm the asymmetry is intentional.
        gatemask_saliency = _attr.clone().detach()
        with open(os.path.join(save_dir, f"gatemask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(gatemask_saliency, file)
        print("==============gatemask==============")
        print_results(gatemask_saliency, true_saliency)

    # ExtremalMask ("nnmask") explainer: same GRU+MLP mask architecture as the
    # gatemask branch but without lambda/sigma regularization arguments.
    # NOTE(review): byte-identical duplicate of the nnmask branch in
    # rare_feature_diffgroup.py — consider extracting a shared helper.
    if "nnmask" in explainers:
        # Fresh CPU-only Lightning trainer; random 128-bit logger version
        # presumably avoids TensorBoard run-directory collisions — TODO confirm.
        trainer = Trainer(
            max_epochs=200,
            accelerator='cpu',
            log_every_n_steps=2,
            logger=TensorBoardLogger(
                save_dir=".",
                version=random.getrandbits(128),
            ),
        )
        # Mask network: bidirectional GRU over F = X.shape[-1] features,
        # then an MLP projecting the concatenated 2*F hidden state to F.
        mask = ExtremalMaskNet(
            forward_func=f,
            model=nn.Sequential(
                RNN(
                    input_size=X.shape[-1],
                    rnn="gru",
                    hidden_size=X.shape[-1],
                    bidirectional=True,
                ),
                MLP([2 * X.shape[-1], X.shape[-1]]),
            ),
            optim="adam",
            lr=0.1,
        )
        explainer = ExtremalMask(f)
        _attr = explainer.attribute(
            X,
            trainer=trainer,
            mask_net=mask,
            batch_size=N_ex,  # presumably all examples in one batch — verify
        )
        # NOTE(review): converted to numpy here, unlike the gatemask branch
        # which pickles a torch tensor — confirm intentional.
        nnmask_saliency = _attr.clone().detach().numpy()
        with open(os.path.join(save_dir, f"nnmask_saliency_{cv}.pkl"), "wb") as file:
            pkl.dump(nnmask_saliency, file)
        print("==============nnmask==============")
        print_results(nnmask_saliency, true_saliency)

    if "dynamask" in explainers:
        pert = GaussianBlur(device=device)  # We use a Gaussian Blur perturbation operator
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



