def test_cache_create_from_chat()

in python/cache.py


    def test_cache_create_from_chat(self):
        # [START cache_create_from_chat]
        from google import genai
        from google.genai import types

        client = genai.Client()
        model_name = "gemini-1.5-flash-001"
        system_instruction = "You are an expert analyzing transcripts."

        # Create a chat session with the given system instruction.
        chat = client.chats.create(
            model=model_name,
            config=types.GenerateContentConfig(system_instruction=system_instruction),
        )
        # `media` is a pathlib.Path to the sample-file directory, defined elsewhere in the test module.
        document = client.files.upload(file=media / "a11.txt")

        response = chat.send_message(
            message=["Hi, could you summarize this transcript?", document]
        )
        print("\n\nmodel:  ", response.text)
        response = chat.send_message(
            message=["Okay, could you tell me more about the trans-lunar injection"]
        )
        print("\n\nmodel:  ", response.text)

        # To cache the conversation so far, pass the chat history as the list of contents.
        cache = client.caches.create(
            model=model_name,
            config={
                "contents": chat.get_history(),
                "system_instruction": system_instruction,
            },
        )
        # Continue the conversation using the cached content.
        chat = client.chats.create(
            model=model_name,
            config=types.GenerateContentConfig(cached_content=cache.name),
        )
        response = chat.send_message(
            message="I didn't understand that last part, could you explain it in simpler language?"
        )
        print("\n\nmodel:  ", response.text)
        # [END cache_create_from_chat]
        client.caches.delete(name=cache.name)
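
If the cache were kept around rather than deleted immediately, you could extend its lifetime and confirm that later requests are actually served from it. A minimal sketch against the same `client`, `cache`, and `chat` objects from the snippet above; the TTL string and the follow-up question are illustrative:

    # Extend the cache lifetime (the "600s" TTL is an illustrative value).
    client.caches.update(
        name=cache.name,
        config=types.UpdateCachedContentConfig(ttl="600s"),
    )

    # Responses generated against cached_content report how many prompt tokens
    # were served from the cache, which confirms the cache is being hit.
    response = chat.send_message(message="How long did that burn last?")
    print("cached tokens:", response.usage_metadata.cached_content_token_count)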