export async function cacheCreateFromChat()

in javascript/cache.js [109:166]


/**
 * Demonstrates creating an explicit cache from an existing chat's history,
 * then continuing the conversation against the cached content.
 *
 * Flow: upload a transcript file, hold a two-turn chat about it, snapshot
 * the chat history into a cache, start a new chat backed by that cache, and
 * clean the cache up afterwards.
 *
 * Requires `GEMINI_API_KEY` in the environment, plus the file-level imports
 * for `GoogleGenAI`, `createUserContent`, `createPartFromUri`, `path`, and
 * the `media` directory constant defined elsewhere in this file.
 *
 * @returns {Promise<string>} The model's final response text.
 */
export async function cacheCreateFromChat() {
  // [START cache_create_from_chat]
  // Make sure to include the following import:
  // import {GoogleGenAI} from '@google/genai';
  const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
  const modelName = "gemini-1.5-flash-001";
  const systemInstruction = "You are an expert analyzing transcripts.";

  // Create a chat session with the system instruction.
  const chat = ai.chats.create({
    model: modelName,
    config: { systemInstruction },
  });
  const filePath = path.join(media, "a11.txt");
  const document = await ai.files.upload({
    file: filePath,
    config: { mimeType: "text/plain" },
  });
  console.log("Uploaded file name:", document.name);

  let response = await chat.sendMessage({
    message: createUserContent([
      "Hi, could you summarize this transcript?",
      createPartFromUri(document.uri, document.mimeType),
    ]),
  });
  console.log("\n\nmodel:", response.text);

  response = await chat.sendMessage({
    message: "Okay, could you tell me more about the trans-lunar injection",
  });
  console.log("\n\nmodel:", response.text);

  // To cache the conversation so far, pass the chat history as the list of contents.
  const chatHistory = chat.getHistory();
  const cache = await ai.caches.create({
    model: modelName,
    config: {
      contents: chatHistory,
      systemInstruction,
    },
  });

  // From here on the cache exists server-side (and is billed until its TTL
  // expires), so guarantee cleanup with try/finally even if a request fails.
  try {
    // Continue the conversation using the cached content.
    const chatWithCache = ai.chats.create({
      model: modelName,
      config: { cachedContent: cache.name },
    });
    response = await chatWithCache.sendMessage({
      message:
        "I didn't understand that last part, could you explain it in simpler language?",
    });
    console.log("\n\nmodel:", response.text);
    // [END cache_create_from_chat]
  } finally {
    // Always delete the cache, even when sendMessage/chats.create throws.
    await ai.caches.delete({ name: cache.name });
  }
  return response.text;
}