def tensorboard()

in dib/predefined/mlp.py


    def tensorboard(self, writer, epoch, mode="on_grad_computed"):
        """Log histograms of activations, weights, and gradients to TensorBoard."""
        name = type(self).__name__

        if mode == "on_grad_computed":
            # Flush the activations collected during the forward pass, then
            # reset the buffer so the next logging step starts clean.
            for k, v in self._to_plot_activation.items():
                writer.add_histogram(
                    f"activations/{name}/{k}", v, global_step=epoch)
            self._to_plot_activation = dict()

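            # First layer (to_hidden): weight and bias histograms, then their gradients.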
            writer.add_histogram(
                f"weights/{name}/w_tohid", self.to_hidden.weight, global_step=epoch
            )
            writer.add_histogram(
                f"weights/{name}/b_tohid", self.to_hidden.bias, global_step=epoch
            )

            writer.add_histogram(
                f"grad/{name}/w_tohid", self.to_hidden.weight.grad, global_step=epoch
            )
            writer.add_histogram(
                f"grad/{name}/b_tohid", self.to_hidden.bias.grad, global_step=epoch
            )

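            # Hidden-layer and output-layer parameters are logged only when
            # the network actually has hidden layers.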
            if self.n_hidden_layers != 0:
                for i, lin in enumerate(self.linears):
                    writer.add_histogram(
                        f"weights/{name}/w_lin{i}", lin.weight, global_step=epoch
                    )
                    writer.add_histogram(
                        f"weights/{name}/b_lin{i}", lin.bias, global_step=epoch
                    )
                writer.add_histogram(
                    f"weights/{name}/w_out", self.out.weight, global_step=epoch
                )
                writer.add_histogram(
                    f"weights/{name}/b_out", self.out.bias, global_step=epoch
                )

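                # Gradients of the same hidden and output layers.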
                for i, lin in enumerate(self.linears):
                    writer.add_histogram(
                        f"grad/{name}/w_lin{i}", lin.weight.grad, global_step=epoch
                    )
                    writer.add_histogram(
                        f"grad/{name}/b_lin{i}", lin.bias.grad, global_step=epoch
                    )
                writer.add_histogram(
                    f"grad/{name}/w_out", self.out.weight.grad, global_step=epoch
                )
                writer.add_histogram(
                    f"grad/{name}/b_out", self.out.bias.grad, global_step=epoch
                )
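
For reference, a minimal sketch of how this method might be driven from a training loop. It assumes the class is a standard PyTorch nn.Module, that writer is a torch.utils.tensorboard.SummaryWriter, and that _to_plot_activation is filled during the forward pass; the forward hook below is one hypothetical way to do that, and n_epochs, batch, compute_loss, and the MLP constructor arguments are likewise hypothetical, not taken from this file:

    import torch
    from torch.utils.tensorboard import SummaryWriter

    from dib.predefined.mlp import MLP  # import path from the header above

    model = MLP(...)  # hypothetical: constructor arguments are not shown here
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
    writer = SummaryWriter(log_dir="runs/mlp")  # hypothetical log directory

    # Hypothetical hook: stash the first layer's activations so that
    # tensorboard() has something to flush from _to_plot_activation.
    model.to_hidden.register_forward_hook(
        lambda mod, inp, out: model._to_plot_activation.update(to_hidden=out.detach())
    )

    for epoch in range(n_epochs):  # n_epochs, batch, compute_loss are hypothetical
        optimizer.zero_grad()
        loss = compute_loss(model, batch)
        loss.backward()
        # After backward() the .grad tensors exist, matching "on_grad_computed".
        model.tensorboard(writer, epoch)
        optimizer.step()

    writer.close()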