in javascript/count_tokens.js [60:108]
export async function tokensChat() {
  // [START tokens_chat]
  // Make sure to include the following import:
  // import {GoogleGenAI} from '@google/genai';
  const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });

  // Initial chat history.
  const history = [
    { role: "user", parts: [{ text: "Hi my name is Bob" }] },
    { role: "model", parts: [{ text: "Hi Bob!" }] },
  ];
  const chat = ai.chats.create({
    model: "gemini-2.0-flash",
    history: history,
  });

  // Count tokens for the current chat history.
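  // chat.getHistory() returns the conversation as an array of Content objects,
  // which countTokens accepts directly as `contents`.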
  const countTokensResponse = await ai.models.countTokens({
    model: "gemini-2.0-flash",
    contents: chat.getHistory(),
  });
  console.log(countTokensResponse.totalTokens);
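
  // Send a message. The response's usageMetadata reports token usage for this
  // call (fields such as promptTokenCount, candidatesTokenCount, totalTokenCount).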
  const chatResponse = await chat.sendMessage({
    message: "In one sentence, explain how a computer works to a young child.",
  });
  console.log(chatResponse.usageMetadata);

  // Append an extra user message (not yet sent) to a copy of the history.
  const extraMessage = {
    role: "user",
    parts: [{ text: "What is the meaning of life?" }],
  };
  const combinedHistory = chat.getHistory();
  combinedHistory.push(extraMessage);
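  // Counting tokens for the combined history estimates the prompt size of the
  // next request without generating a response.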
  const combinedCountTokensResponse = await ai.models.countTokens({
    model: "gemini-2.0-flash",
    contents: combinedHistory,
  });
  console.log(
    "Combined history token count:",
    combinedCountTokensResponse.totalTokens,
  );
  // [END tokens_chat]

  return {
    historyTokenCount: countTokensResponse.totalTokens,
    usage: chatResponse.usageMetadata,
    combinedTokenCount: combinedCountTokensResponse.totalTokens,
  };
}