async function callPredict(codeBase)

in solution-overview/lib/gcp/geminihelper.js [22:92]
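
The excerpt begins at line 22 of geminihelper.js, so it relies on modules loaded earlier in the file. A minimal sketch of those imports is shown below; the @google-cloud/vertexai package is the official Vertex AI Node.js SDK, while the require paths for the config and context helpers are assumptions for illustration only.

/* Assumed imports from the top of geminihelper.js (lines 1-21); paths are illustrative */
const { VertexAI } = require('@google-cloud/vertexai'); // official Vertex AI Node.js SDK
const configEnv = require('./configenv');     // hypothetical path: resolves the GCP project from the environment
const configFile = require('./configfile');   // hypothetical path: location, model, temperature, max output tokens
const contextFile = require('./contextfile'); // hypothetical path: prompt templates for each chat turn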


async function callPredict(codeBase) {
    /* Load config: project ID comes from the environment, model settings from the config file */
    let projectId = configEnv.getProject();
    let locationId = configFile.getLocation();
    let modelId = configFile.getModel();
    let temperature = parseFloat(configFile.getTemperature());
    let maxOutTokens = parseInt(configFile.getMaxtokens(), 10); // token limit must be an integer
  
    /* Initialize Vertex */
    const vertex_ai = new VertexAI({ project: projectId, location: locationId });
  
    /* System instruction and chat history */
    const systemInstruction = contextFile.getMainContext() + codeBase;
    let completeHistory = [];
  
    /* Initialize Model */
    let generativeModel = vertex_ai.preview.getGenerativeModel({
      model: modelId,
      generationConfig: {
        maxOutputTokens: maxOutTokens,
        temperature: temperature,
        topP: 0.95,
        // responseMimeType: "application/json"
      },
      safetySettings: [
        {
          category: "HARM_CATEGORY_HATE_SPEECH",
          threshold: "BLOCK_ONLY_HIGH"
        },
        {
          category: "HARM_CATEGORY_DANGEROUS_CONTENT",
          threshold: "BLOCK_ONLY_HIGH"
        },
        {
          category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
          threshold: "BLOCK_MEDIUM_AND_ABOVE"
        },
        {
          category: "HARM_CATEGORY_HARASSMENT",
          threshold: "BLOCK_MEDIUM_AND_ABOVE"
        }
      ],
      systemInstruction: {
        parts: [{ text: systemInstruction }]
      },
    });
  
    const chat = generativeModel.startChat({ history: completeHistory });

    /* First Message */
    let firstMessage = contextFile.getOverviewContext() + codeBase;
    let streamResult = await chat.sendMessageStream(firstMessage);
    let currentResponse = (await streamResult.response).candidates[0].content.parts[0].text;
    completeHistory.push({ role: "user", parts: [{ text: firstMessage }] });
    completeHistory.push({ role: "model", parts: [{ text: currentResponse }] });

    /* Second Message */
    let secondMessage = contextFile.getMermaidContext();
    streamResult = await chat.sendMessageStream(secondMessage);
    currentResponse = (await streamResult.response).candidates[0].content.parts[0].text;
    completeHistory.push({ role: "user", parts: [{ text: secondMessage }] });
    completeHistory.push({ role: "model", parts: [{ text: currentResponse }] });

    /* Third Message */
    let thirdMessage = contextFile.getSolutionContext();
    streamResult = await chat.sendMessageStream(thirdMessage);
    currentResponse = (await streamResult.response).candidates[0].content.parts[0].text;
  
    /* return summary */
    return currentResponse;
  }
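
A minimal usage sketch, assuming callPredict is exported from geminihelper.js and that the caller has already gathered the code base into a single string (both assumptions):

const gemini = require('./lib/gcp/geminihelper'); // assumed export path

async function main() {
  // Assumption: codeBase is the repository source concatenated into one string by the caller
  const codeBase = '<concatenated repository source>';
  const summary = await gemini.callPredict(codeBase);
  console.log(summary); // the solution summary returned after the third chat turn
}

main().catch(console.error);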