public Quote findRandomQuote()

in ai-patterns/spring-ai-quotes-llm-in-gke/src/main/java/com/example/quotes/domain/QuoteLLMInVertexService.java [34:89]


  /**
   * Fetches a random literary quote from a Gemini model hosted in Vertex AI,
   * via Spring AI's OpenAI-compatible chat client.
   *
   * <p>The model is prompted to return exactly three fields (book, quote, author)
   * as JSON wrapped in triple backquotes; {@code Quote.parseQuoteFromJson} parses
   * that payload. Endpoint, completions path and model name are read from the
   * {@code spring.ai.openai.vertex.ai.chat.*} properties.
   *
   * @return the parsed {@link Quote}, or a sentinel Quote ("N/A" author/book and a
   *         retry message) when the OAuth2 token could not be obtained
   */
  public Quote findRandomQuote() {
    SystemMessage systemMessage = new SystemMessage("""
        You are a helpful AI assistant. 
        You are an AI assistant that helps people find information.
        You should reply to the user's request with your name and also in the style of a literary professor.
        """);
    UserMessage userMessage = new UserMessage("""
        Answer precisely; please provide a quote from a random book, 
        including only book, quote and author; do not repeat quotes from the same book
        return only 3 values, the book, the quote and the author, strictly in JSON format, wrapped in triple backquotes ```json```, while eliminating every other text
        """);

    // Endpoint configuration for the OpenAI-compatible Vertex AI chat API.
    String baseURL = env.getProperty("spring.ai.openai.vertex.ai.chat.base-url");
    String completionsPath = env.getProperty("spring.ai.openai.vertex.ai.chat.completions-path");
    String model = env.getProperty("spring.ai.openai.vertex.ai.chat.options.model");

    String token;
    try {
      token = getOauth2Token(baseURL + completionsPath);
    } catch (IOException e) {
      // Degrade gracefully: log the failure and return a sentinel quote rather
      // than propagating, so the caller always receives a renderable Quote.
      System.err.printf("OAuth2 token acquisition failed: %s%n", e.getMessage());
      Quote quote = new Quote();
      quote.setQuote("Quote generation failure; please retry");
      quote.setAuthor("N/A");
      quote.setBook("N/A");
      return quote;
    }

    // The embeddings path is unused here but required by the OpenAiApi constructor.
    OpenAiApi openAiApi = new OpenAiApi(baseURL, token, completionsPath,
        "/v1/embeddings",
        RestClient.builder(),
        WebClient.builder(),
        RetryUtils.DEFAULT_RESPONSE_ERROR_HANDLER);

    OpenAiChatModel openAIGemini = new OpenAiChatModel(openAiApi);
    // Low temperature: favor deterministic, well-formed JSON over creativity.
    OpenAiChatOptions openAiChatOptions = OpenAiChatOptions.builder()
        .withTemperature(0.2)
        .withModel(model)
        .build();

    long start = System.currentTimeMillis();
    String input = openAIGemini.call(
            new Prompt(List.of(systemMessage, userMessage), openAiChatOptions))
            .getResult().getOutput().getContent();

    System.out.printf("\nLLM Model in VertexAI provided response: \n%s\n", input);
    System.out.printf("Call took %s ms", (System.currentTimeMillis() - start));

    return Quote.parseQuoteFromJson(input);
  }