From 2015494a43ed4e26fa0c758ad86a6c61774ddc03 Mon Sep 17 00:00:00 2001
From: wxp <lovewaz@126.com>
Date: 星期三, 16 四月 2025 18:32:04 +0800
Subject: [PATCH] #IC1P55 #C17PK 1.修改function_call模型调用方式,stream=false 2.修复AiWorkflowFunction中invoke方法未对Tinyflow设置provider
---
aiflowy-modules/aiflowy-module-ai/src/main/java/tech/aiflowy/ai/controller/AiBotController.java | 145 +++++++++++++++++++++++++++---------------------
 1 file changed, 82 insertions(+), 63 deletions(-)
diff --git a/aiflowy-modules/aiflowy-module-ai/src/main/java/tech/aiflowy/ai/controller/AiBotController.java b/aiflowy-modules/aiflowy-module-ai/src/main/java/tech/aiflowy/ai/controller/AiBotController.java
index df91abc..7c31dd9 100644
--- a/aiflowy-modules/aiflowy-module-ai/src/main/java/tech/aiflowy/ai/controller/AiBotController.java
+++ b/aiflowy-modules/aiflowy-module-ai/src/main/java/tech/aiflowy/ai/controller/AiBotController.java
@@ -151,81 +151,100 @@
final Boolean[] needClose = {true};
- ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+ if (!humanMessage.getFunctions().isEmpty()) {
+ try {
+ AiMessageResponse aiMessageResponse = llm.chat(historiesPrompt);
+ function_call(aiMessageResponse, emitter, needClose, historiesPrompt, llm, prompt);
+ } catch (Exception e) {
+ emitter.completeWithError(e);
+ }
- llm.chatStream(historiesPrompt, new StreamResponseListener() {
- @Override
- public void onMessage(ChatContext context, AiMessageResponse response) {
- try {
- RequestContextHolder.setRequestAttributes(sra, true);
- String content = response.getMessage().getContent();
- Object messageContent = response.getMessage();
- if (StringUtil.hasText(content)) {
- String jsonResult = JSON.toJSONString(messageContent);
- emitter.send(jsonResult);
+ if (needClose[0]) {
+ System.out.println("function chat complete");
+ emitter.complete();
+ }
+ } else {
+
+ llm.chatStream(historiesPrompt, new StreamResponseListener() {
+ @Override
+ public void onMessage(ChatContext context, AiMessageResponse response) {
+ try {
+
+ function_call(response, emitter, needClose, historiesPrompt, llm, prompt);
+ } catch (Exception e) {
+ emitter.completeWithError(e);
}
- List<FunctionCaller> functionCallers = response.getFunctionCallers();
- if (CollectionUtil.hasItems(functionCallers)) {
- needClose[0] = false;
- for (FunctionCaller functionCaller : functionCallers) {
- Object result = functionCaller.call();
- if (ObjectUtil.isNotEmpty(result)) {
+ }
- String newPrompt = "请根据以下内容回答用户，内容是:\n" + result + "\n 用户的问题是：" + prompt;
- historiesPrompt.addMessageTemporary(new HumanMessage(newPrompt));
-
- llm.chatStream(historiesPrompt, new StreamResponseListener() {
- @Override
- public void onMessage(ChatContext context, AiMessageResponse response) {
- needClose[0] = true;
- String content = response.getMessage().getContent();
- Object messageContent = response.getMessage();
- if (StringUtil.hasText(content)) {
- String jsonResult = JSON.toJSONString(messageContent);
- emitter.send(jsonResult);
- }
- }
-
- @Override
- public void onStop(ChatContext context) {
- if (needClose[0]) {
- System.out.println("function chat complete");
- emitter.complete();
- }
- historiesPrompt.clearTemporaryMessages();
- }
-
- @Override
- public void onFailure(ChatContext context, Throwable throwable) {
- emitter.completeWithError(throwable);
- }
- });
- }
- }
+ @Override
+ public void onStop(ChatContext context) {
+ if (needClose[0]) {
+ System.out.println("normal chat complete");
+ emitter.complete();
}
- } catch (Exception e) {
- emitter.completeWithError(e);
}
- }
- @Override
- public void onStop(ChatContext context) {
- if (needClose[0]) {
- System.out.println("normal chat complete");
- emitter.complete();
+ @Override
+ public void onFailure(ChatContext context, Throwable throwable) {
+ emitter.completeWithError(throwable);
}
- }
-
- @Override
- public void onFailure(ChatContext context, Throwable throwable) {
- emitter.completeWithError(throwable);
- }
- });
+ });
+ }
return emitter;
}
+ private void function_call(AiMessageResponse aiMessageResponse, MySseEmitter emitter, Boolean[] needClose, HistoriesPrompt historiesPrompt, Llm llm, String prompt) {
+ ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+ RequestContextHolder.setRequestAttributes(sra, true);
+ String content = aiMessageResponse.getMessage().getContent();
+ Object messageContent = aiMessageResponse.getMessage();
+ if (StringUtil.hasText(content)) {
+ String jsonResult = JSON.toJSONString(messageContent);
+ emitter.send(jsonResult);
+ }
+ List<FunctionCaller> functionCallers = aiMessageResponse.getFunctionCallers();
+ if (CollectionUtil.hasItems(functionCallers)) {
+ needClose[0] = false;
+ for (FunctionCaller functionCaller : functionCallers) {
+ Object result = functionCaller.call();
+ if (ObjectUtil.isNotEmpty(result)) {
+
+ String newPrompt = "请根据以下内容回答用户，内容是:\n" + result + "\n 用户的问题是：" + prompt;
+ historiesPrompt.addMessageTemporary(new HumanMessage(newPrompt));
+
+ llm.chatStream(historiesPrompt, new StreamResponseListener() {
+ @Override
+ public void onMessage(ChatContext context, AiMessageResponse response) {
+ needClose[0] = true;
+ String content = response.getMessage().getContent();
+ Object messageContent = response.getMessage();
+ if (StringUtil.hasText(content)) {
+ String jsonResult = JSON.toJSONString(messageContent);
+ emitter.send(jsonResult);
+ }
+ }
+
+ @Override
+ public void onStop(ChatContext context) {
+ if (needClose[0]) {
+ System.out.println("function chat complete");
+ emitter.complete();
+ }
+ historiesPrompt.clearTemporaryMessages();
+ }
+
+ @Override
+ public void onFailure(ChatContext context, Throwable throwable) {
+ emitter.completeWithError(throwable);
+ }
+ });
+ }
+ }
+ }
+ }
+
private void appendWorkflowFunctions(BigInteger botId, HumanMessage humanMessage) {
QueryWrapper queryWrapper = QueryWrapper.create().eq(AiBotWorkflow::getBotId, botId);
List<AiBotWorkflow> aiBotWorkflows = aiBotWorkflowService.getMapper().selectListWithRelationsByQuery(queryWrapper);
--
Gitblit v1.8.0