public LLMNodeData parse(Map<String, Object> data)

in spring-ai-alibaba-graph/spring-ai-alibaba-graph-studio/src/main/java/com/alibaba/cloud/ai/service/dsl/nodes/LLMNodeDataConverter.java [58:123]


			public LLMNodeData parse(Map<String, Object> data) {
				List<VariableSelector> inputs = new ArrayList<>();
				// convert prompt template
				Map<String, Object> context = (Map<String, Object>) data.get("context");
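				// prompt_template may be a single object or a list in the Dify DSL; normalize it to a list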
				List<Map<String, Object>> difyTmplList;
				if (data.get("prompt_template") instanceof List<?>) {
					difyTmplList = (List<Map<String, Object>>) data.get("prompt_template");
				}
				else {
					difyTmplList = List.of((Map<String, Object>) data.get("prompt_template"));
				}
				List<LLMNodeData.PromptTemplate> tmplList = new ArrayList<>();
				// when the context feature is enabled, inline the referenced variable into the first (system) template
				if (context != null && Boolean.TRUE.equals(context.get("enabled"))) {
					List<String> variableSelector = (List<String>) context.get("variable_selector");
					String systemText = (String) difyTmplList.get(0).get("text");
					String replacement = systemText.replace("{{#context#}}",
							variableSelector.get(0) + "." + variableSelector.get(1));
					difyTmplList.get(0).put("text", replacement);
				}
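				// convert each Dify template into the internal PromptTemplate form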
				for (Map<String, Object> promptTmpl : difyTmplList) {
					List<String> variables = new ArrayList<>();
					String tmpl = StringTemplateUtil.fromDifyTmpl((String) promptTmpl.get("text"), variables);
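					// variables referenced as "<node>.<field>" become VariableSelector inputs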
					variables.forEach(variable -> {
						String[] splits = variable.split("\\.", 2);
						inputs.add(new VariableSelector(splits[0], splits[1]));
					});
					String role = (String) promptTmpl.getOrDefault("role", "system");
					tmplList.add(new LLMNodeData.PromptTemplate(role, tmpl));
				}
				// convert model config
				Map<String, Object> modelData = (Map<String, Object>) data.get("model");
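				// lenient mapper: unknown keys are ignored and Java property names are matched in all-lower-case form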
				ObjectMapper objectMapper = new ObjectMapper();
				objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
				objectMapper.setPropertyNamingStrategy(PropertyNamingStrategies.LOWER_CASE);
				LLMNodeData.ModelConfig modelConfig = new LLMNodeData.ModelConfig()
					.setMode((String) modelData.get("mode"))
					.setName((String) modelData.get("name"))
					.setProvider((String) modelData.get("provider"))
					.setCompletionParams(objectMapper.convertValue(modelData.get("completion_params"),
							LLMNodeData.CompletionParams.class));

				// convert memory config; it is enabled only when the DSL provides a "memory" section
				LLMNodeData.MemoryConfig memoryConfig = new LLMNodeData.MemoryConfig();
				if (data.containsKey("memory")) {
					List<String> variables = new ArrayList<>();
					Map<String, Object> memoryData = (Map<String, Object>) data.get("memory");
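					// the query prompt template becomes the memory's last-message template; its variables are also recorded as inputs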
					String lastMessageTemplate = (String) memoryData.get("query_prompt_template");
					lastMessageTemplate = StringTemplateUtil.fromDifyTmpl(lastMessageTemplate, variables);
					variables.forEach(variable -> {
						String[] splits = variable.split("\\.", 2);
						inputs.add(new VariableSelector(splits[0], splits[1]));
					});
					Map<String, Object> window = (Map<String, Object>) memoryData.get("window");
					Boolean windowEnabled = window != null ? (Boolean) window.get("enabled") : null;
					Integer windowSize = window != null ? (Integer) window.get("size") : null;
					memoryConfig.setEnabled(true)
						.setWindowEnabled(windowEnabled)
						.setWindowSize(windowSize)
						.setLastMessageTemplate(lastMessageTemplate)
						.setIncludeLastMessage(false);
				}

				// assemble the node data with the collected inputs and the default output schema
				return new LLMNodeData(inputs, List.of(LLMNodeData.DEFAULT_OUTPUT_SCHEMA)).setModel(modelConfig)
					.setPromptTemplate(tmplList)
					.setMemoryConfig(memoryConfig);
			}
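
For orientation, a minimal usage sketch. The map mirrors only the DSL keys the method reads above (context, prompt_template, model); the model values and the {{#start.query#}} reference are placeholders, and constructing the converter directly with new LLMNodeDataConverter() is an assumption — in the project the converter would typically be obtained as a Spring bean.

		Map<String, Object> data = new HashMap<>();
		data.put("context", Map.of("enabled", false));
		data.put("prompt_template",
				Map.of("role", "system", "text", "You are a helpful assistant. Question: {{#start.query#}}"));
		data.put("model", Map.of("mode", "chat", "name", "qwen-max", "provider", "tongyi",
				"completion_params", Map.of("temperature", 0.7)));

		// hypothetical wiring: the converter is created directly here purely for illustration
		LLMNodeData nodeData = new LLMNodeDataConverter().parse(data);
		// the returned node data carries a VariableSelector for "start.query", the parsed ModelConfig,
		// and a default MemoryConfig (no "memory" section was supplied)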