in google/generativeai/generative_models.py [0:0]
def history(self) -> list[protos.Content]:
    """The chat history.

    If a response is still pending, it is validated and folded into the
    stored history before the list is returned.
    """
    pending = self._last_received
    if pending is None:
        # Nothing outstanding: the stored history is already complete.
        return self._history

    candidate = pending.candidates[0]

    # Only these finish reasons represent a usable turn; anything else
    # (safety block, recitation, etc.) marks the response as broken.
    acceptable_reasons = (
        protos.Candidate.FinishReason.FINISH_REASON_UNSPECIFIED,
        protos.Candidate.FinishReason.STOP,
        protos.Candidate.FinishReason.MAX_TOKENS,
    )
    if candidate.finish_reason not in acceptable_reasons:
        pending._error = generation_types.StopCandidateException(candidate)

    if pending._error is not None:
        # A broken streaming response cannot be appended to the history;
        # point the user at `chat.last` / `chat.rewind()` to recover.
        raise generation_types.BrokenResponseError(
            "Unable to build a coherent chat history due to a broken streaming response. "
            "Refer to the previous exception for details. "
            "To inspect the last response object, use `chat.last`. "
            "To remove the last request/response `Content` objects from the chat, "
            "call `last_send, last_received = chat.rewind()` and continue without it."
        ) from pending._error

    reply = candidate.content
    if not reply.role:
        # Streaming responses may omit the role; fill in the model's.
        reply.role = _MODEL_ROLE

    # Commit the completed request/response pair, then clear the pending slots.
    self._history.extend([self._last_sent, reply])
    self._last_sent = None
    self._last_received = None
    return self._history