def _log_lr_and_loss()

in ignite/handlers/lr_finder.py

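Per-iteration handler used by FastaiLRFinder: at each trainer iteration it extracts a scalar loss from the engine output, records the current learning rate and the (optionally smoothed) loss in the finder's history, tracks the best loss seen so far, and terminates the trainer early once the loss exceeds diverge_th times that best loss.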

    def _log_lr_and_loss(self, trainer: Engine, output_transform: Callable, smooth_f: float, diverge_th: float) -> None:
        output = trainer.state.output
        loss = output_transform(output)
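        # Coerce the loss to a plain float: scalar tensors (0d, or 1d with a
        # single element) are unwrapped; any other type or shape is rejected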
        if not isinstance(loss, float):
            if isinstance(loss, torch.Tensor):
                if (loss.ndimension() == 0) or (loss.ndimension() == 1 and len(loss) == 1):
                    loss = loss.item()
                else:
                    raise ValueError(
                        "if output of the engine is torch.Tensor, then "
                        "it must be 0d torch.Tensor or 1d torch.Tensor with 1 element, "
                        f"but got torch.Tensor of shape {loss.shape}"
                    )
            else:
                raise TypeError(
                    "output of the engine should be of type float or 0d torch.Tensor "
                    "or 1d torch.Tensor with 1 element, "
                    f"but got output of type {type(loss).__name__}"
                )
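        # Reduce the loss across processes in distributed runs
        # (a no-op in a single-process setting)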
        loss = idist.all_reduce(loss)
        lr = self._lr_schedule.get_param()  # type: ignore[union-attr]
        self._history["lr"].append(lr)
        if trainer.state.iteration == 1:
            self._best_loss = loss
        else:
            if smooth_f > 0:
                loss = smooth_f * loss + (1 - smooth_f) * self._history["loss"][-1]
            if loss < self._best_loss:
                self._best_loss = loss
        self._history["loss"].append(loss)

        # Check if the loss has diverged; if it has, stop the trainer
        if self._history["loss"][-1] > diverge_th * self._best_loss:  # type: ignore[operator]
            self._diverge_flag = True
            self.logger.info("Stopping early, the loss has diverged")
            trainer.terminate()
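
For context, _log_lr_and_loss is not called by user code: FastaiLRFinder.attach() registers it on the trainer's iteration events and forwards output_transform, smooth_f and diverge_th to it. A minimal usage sketch follows, assuming the public FastaiLRFinder API from ignite.handlers; the toy model and data are placeholders, not part of the source:

    import torch
    from torch.utils.data import DataLoader, TensorDataset

    from ignite.engine import create_supervised_trainer
    from ignite.handlers import FastaiLRFinder

    # Toy model and data, only to make the sketch self-contained
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-4)
    criterion = torch.nn.CrossEntropyLoss()
    dataset = TensorDataset(torch.randn(128, 10), torch.randint(0, 2, (128,)))
    dataloader = DataLoader(dataset, batch_size=16)

    trainer = create_supervised_trainer(model, optimizer, criterion)

    lr_finder = FastaiLRFinder()
    to_save = {"model": model, "optimizer": optimizer}

    # smooth_f and diverge_th shown here are the documented defaults; they are
    # the values _log_lr_and_loss receives at every iteration
    with lr_finder.attach(
        trainer, to_save=to_save, smooth_f=0.05, diverge_th=5.0
    ) as trainer_with_lr_finder:
        trainer_with_lr_finder.run(dataloader)

    print(lr_finder.lr_suggestion())  # learning rate at the steepest loss descent

Because the divergence check compares the latest history entry against diverge_th * self._best_loss, the run usually stops well before the scheduled number of iterations once the learning rate grows too large, which is what keeps the LR sweep cheap.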