| | |
import com.alibaba.fastjson.serializer.SerializeConfig;
import com.jfinal.template.stat.ast.Break;
import com.mybatisflex.core.query.QueryWrapper;
import com.mybatisflex.core.table.TableInfo;
import com.mybatisflex.core.table.TableInfoFactory;
import io.milvus.param.R;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;

import tech.aiflowy.common.ai.ChatManager;
import tech.aiflowy.common.ai.MySseEmitter;
import tech.aiflowy.common.domain.Result;
import tech.aiflowy.common.entity.LoginAccount;
import tech.aiflowy.common.satoken.util.SaTokenUtil;
import tech.aiflowy.common.util.StringUtil;
import tech.aiflowy.common.web.controller.BaseCurdController;
| | |
// Guard: the requested bot must exist before any LLM work is attempted.
if (aiBot == null) {
    return ChatManager.getInstance().sseEmitterForContent("机器人不存在");
}

if (StringUtil.hasText(aiBot.getApiEndpoint())) {
    // 情况1:aiBot自带大模型信息 — the bot carries its own model endpoint.
    try {
        // 从aiBot构建自定义LLM实现
        // NOTE(review): still a placeholder — until a real implementation is
        // wired in, this branch always returns the "LLM获取为空" emitter below.
        Llm llm = null;
        if (llm == null) {
            return ChatManager.getInstance().sseEmitterForContent("LLM获取为空");
        }

        // Optional per-bot options; systemPrompt may legitimately be absent.
        Map<String, Object> llmOptions = aiBot.getLlmOptions();
        String systemPrompt = llmOptions != null ? (String) llmOptions.get("systemPrompt") : null;

        // Conversation memory scoped to this bot / logged-in account / session.
        AiBotMessageMemory memory = new AiBotMessageMemory(botId, SaTokenUtil.getLoginAccount().getId(),
                sessionId, isExternalMsg, aiBotMessageService, aiBotConversationMessageMapper,
                aiBotConversationMessageService);

        final HistoriesPrompt historiesPrompt = new HistoriesPrompt();
        if (systemPrompt != null) {
            historiesPrompt.setSystemMessage(SystemMessage.of(systemPrompt));
        }
        historiesPrompt.setMemory(memory);

        HumanMessage humanMessage = new HumanMessage(prompt);

        // 添加插件相关的function calling
        appendPluginToolFunction(botId, humanMessage);
        //添加工作流相关的 Function Calling
        appendWorkflowFunctions(botId, humanMessage);
        //添加知识库相关的 Function Calling
        appendKnowledgeFunctions(botId, humanMessage);

        historiesPrompt.addMessage(humanMessage);

        // 2-minute SSE timeout.
        MySseEmitter emitter = new MySseEmitter((long) (1000 * 60 * 2));

        // Single-element array so the anonymous listener can mutate the flag;
        // false means a function call took ownership of closing the emitter.
        final Boolean[] needClose = {true};

        ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        // 统一使用流式处理,无论是否有 Function Calling
        llm.chatStream(historiesPrompt, new StreamResponseListener() {
            @Override
            public void onMessage(ChatContext context, AiMessageResponse response) {
                try {
                    // Propagate the original request context into the streaming callback thread.
                    RequestContextHolder.setRequestAttributes(sra, true);
                    if (response != null) {
                        // 检查是否需要触发 Function Calling
                        if (response.getFunctionCallers() != null && CollectionUtil.hasItems(response.getFunctionCallers())) {
                            needClose[0] = false;
                            function_call(response, emitter, needClose, historiesPrompt, llm, prompt, false);
                        } else if (response.getMessage() != null) {
                            // 强制流式返回,即使有 Function Calling 也先返回部分结果
                            String content = response.getMessage().getContent();
                            if (StringUtil.hasText(content)) {
                                emitter.send(JSON.toJSONString(response.getMessage()));
                            }
                        }
                    }
                } catch (Exception e) {
                    emitter.completeWithError(e);
                }
            }

            @Override
            public void onStop(ChatContext context) {
                // Only close here when no function call has taken over the emitter.
                if (needClose[0]) {
                    emitter.complete();
                }
            }

            @Override
            public void onFailure(ChatContext context, Throwable throwable) {
                emitter.completeWithError(throwable);
            }
        });

        return emitter;
    } catch (Exception e) {
        // Any failure while wiring the custom LLM is surfaced as a plain SSE message.
        return ChatManager.getInstance().sseEmitterForContent("自定义LLM配置错误");
    }
} else {
    // 情况2: use the platform-managed LLM referenced by the bot's llmId.
    Map<String, Object> llmOptions = aiBot.getLlmOptions();
    String systemPrompt = llmOptions != null ? (String) llmOptions.get("systemPrompt") : null;
    AiLlm aiLlm = aiLlmService.getById(aiBot.getLlmId());

    if (aiLlm == null) {
        return ChatManager.getInstance().sseEmitterForContent("LLM不存在");
    }

    Llm llm = aiLlm.toLlm();
    if (llm == null) {
        return ChatManager.getInstance().sseEmitterForContent("LLM获取为空");
    }

    // Conversation memory scoped to this bot / logged-in account / session.
    AiBotMessageMemory memory = new AiBotMessageMemory(botId, SaTokenUtil.getLoginAccount().getId(),
            sessionId, isExternalMsg, aiBotMessageService, aiBotConversationMessageMapper,
            aiBotConversationMessageService);

    final HistoriesPrompt historiesPrompt = new HistoriesPrompt();
    if (systemPrompt != null) {
        historiesPrompt.setSystemMessage(SystemMessage.of(systemPrompt));
    }
    historiesPrompt.setMemory(memory);

    HumanMessage humanMessage = new HumanMessage(prompt);

    // 添加插件相关的function calling
    appendPluginToolFunction(botId, humanMessage);
    //添加工作流相关的 Function Calling
    appendWorkflowFunctions(botId, humanMessage);
    //添加知识库相关的 Function Calling
    appendKnowledgeFunctions(botId, humanMessage);

    historiesPrompt.addMessage(humanMessage);

    // 2-minute SSE timeout.
    MySseEmitter emitter = new MySseEmitter((long) (1000 * 60 * 2));

    // Single-element array so the anonymous listener can mutate the flag;
    // false means a function call took ownership of closing the emitter.
    final Boolean[] needClose = {true};

    ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
    // 统一使用流式处理,无论是否有 Function Calling
    llm.chatStream(historiesPrompt, new StreamResponseListener() {
        @Override
        public void onMessage(ChatContext context, AiMessageResponse response) {
            try {
                // Propagate the original request context into the streaming callback thread.
                RequestContextHolder.setRequestAttributes(sra, true);
                if (response != null) {
                    // 检查是否需要触发 Function Calling
                    if (response.getFunctionCallers() != null && CollectionUtil.hasItems(response.getFunctionCallers())) {
                        needClose[0] = false;
                        function_call(response, emitter, needClose, historiesPrompt, llm, prompt, false);
                    } else if (response.getMessage() != null) {
                        // 强制流式返回,即使有 Function Calling 也先返回部分结果
                        String content = response.getMessage().getContent();
                        if (StringUtil.hasText(content)) {
                            emitter.send(JSON.toJSONString(response.getMessage()));
                        }
                    }
                }
            } catch (Exception e) {
                emitter.completeWithError(e);
            }
        }

        @Override
        public void onStop(ChatContext context) {
            // Only close here when no function call has taken over the emitter.
            if (needClose[0]) {
                emitter.complete();
            }
        }

        @Override
        public void onFailure(ChatContext context, Throwable throwable) {
            emitter.completeWithError(throwable);
        }
    });

    return emitter;
}
| | | |
| | | /** |