2025-05-29 02:32:31 [XNIO-1 task-3] INFO o.d.c.w.i.PlusWebInvokeTimeInterceptor
- [PLUS]开始请求 => URL[POST /platform/chat/streamLLM],参数类型[json],参数:[{"model":"qwen","input":"讲的什么?","filePath":"http://fileeds.finchina.com:12001/wpspreview/mkplatform/2025/05/14/538a2f695f1a4aad99155327b59a43b1.html","fileGuid":"bcf418b1-b45f-46c2-a576-d47ac3e984c4","projectId":3651,"chatType":1}]
Consume Time:83 ms 2025-05-29 02:32:31
Execute SQL:SELECT id,guid,file_name,source_path,target_path,original_path,state,public_state,memo,tmstamp,is_del,project_id,session_id,create_dept,create_by,create_time,update_by,update_time FROM mk_files WHERE (guid = 'bcf418b1-b45f-46c2-a576-d47ac3e984c4')
Consume Time:17 ms 2025-05-29 02:32:32
Execute SQL:INSERT INTO mk_prompts ( id, guid, project_id, type, content, files_guid, model_type, create_dept, create_by, create_time, update_by, update_time ) VALUES ( 1927915933471264769, '3cd0828a-b265-4221-bbe1-09cc7ad88e7f', 3651, 0, '讲的什么?', 'bcf418b1-b45f-46c2-a576-d47ac3e984c4', 'qwen', 103, 1, '2025-05-29 02:32:32', 1, '2025-05-29 02:32:32' )
Consume Time:78 ms 2025-05-29 02:32:32
Execute SQL:INSERT INTO mk_prompts ( id, guid, project_id, type, files_guid, prompts_guid, model_type, create_dept, create_by, create_time, update_by, update_time ) VALUES ( 1927915933622259713, '9ddef2c6-f774-435e-b901-f4982f9bb0a0', 3651, 1, 'bcf418b1-b45f-46c2-a576-d47ac3e984c4', '3cd0828a-b265-4221-bbe1-09cc7ad88e7f', 'qwen', 103, 1, '2025-05-29 02:32:32', 1, '2025-05-29 02:32:32' )
2025-05-29 02:33:04 [ForkJoinPool.commonPool-worker-1] WARN o.d.c.m.h.InjectionMetaObjectHandler
- 自动注入警告 => 用户未登录
2025-05-29 02:33:04 [ForkJoinPool.commonPool-worker-1] ERROR o.d.p.controller.MkChatController
- Error in streamLLM
java.lang.RuntimeException: {"status": "ERROR","error_msg": "处理响应时发生错误:org.mybatis.spring.MyBatisSystemException"}
at org.dromara.platform.service.impl.MkChatServiceImpl.lambda$requestLLMStream$2(MkChatServiceImpl.java:278)
at java.base/java.util.concurrent.CompletableFuture$UniAccept.tryFire(CompletableFuture.java:718)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:510)
at java.base/java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:614)
at java.base/java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:844)
at java.base/java.util.concurrent.CompletableFuture$Completion.exec(CompletableFuture.java:483)
at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:373)
at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1182)
at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1655)
at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1622)
at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:165)
这个是报错,以下是代码
@SaIgnore
@ResponseBody
@PostMapping(value = "/streamLLM", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
public Flux<String> streamLLM(@RequestBody PromptBo promptBo) throws Exception {
return chatService.getAnswerByFlux(promptBo)
.onErrorResume(e -> {
log.error("Error in streamLLM", e);
return Flux.just(e.getMessage());
});
}
@Override
public Flux<String> getAnswerByFlux(PromptBo promptBo) throws Exception {
String requestURL = LLMUrl;
if(promptBo != null && promptBo.getModel() != null){
if(promptBo.getModel().equals("deepseek-v3")||promptBo.getModel().equals("deepseek-r1")||promptBo.getModel().equals("deepseek-r1-volc")){
requestURL = LLMUrlDS;
}
}
String content;
Long projectId = promptBo.getProjectId();
if (projectId == null){
return Flux.error(new RuntimeException("项目id不能为空"));
}
String questionGuid = UUID.randomUUID().toString();
MkPromptsBo question = new MkPromptsBo(questionGuid,projectId,0,promptBo.getModel()); //入库问题
MkPromptsBo answer = new MkPromptsBo(UUID.randomUUID().toString(),projectId,1,questionGuid,promptBo.getModel()); // 入库答案
MkPromptsBo think = new MkPromptsBo(UUID.randomUUID().toString(),projectId,2,questionGuid,promptBo.getModel()); // think
if (promptBo.getChatType() == 1 || promptBo.getChatType() == 2){
// 全文问答或局部检索问答
MkFiles file = filesService.queryTargetPathByGuid(promptBo.getFileGuid());
String fileType = getFileExtension(file.getSourcePath(),file.getFileName());
R<String> r = getFileContent(fileType, file);
if (r.getCode() == R.SUCCESS){
content = r.getData();
}
else {
return Flux.error(new RuntimeException("{\"status\": \"ERROR\",\"error_msg\": \"" + r.getMsg() + "\"}"));
}
}
else if(promptBo.getChatType() == 0) {
// 局部问答
content = promptBo.getContent()==null?"":promptBo.getContent();
}
else{
return Flux.error(new RuntimeException("chatType参数有误"));
}
question.setFilesGuid(promptBo.getFileGuid());
question.setContent(promptBo.getInput());
answer.setFilesGuid(promptBo.getFileGuid());
think.setFilesGuid(promptBo.getFileGuid());
promptsService.insertByBo(question);
promptsService.insertByBo(answer);
if(promptBo.getModel().equals("deepseek-r1")||promptBo.getModel().equals("deepseek-r1-volc")){
promptsService.insertByBo(think);
}
// 向大模型提问
String promptStr = "You are a helpful assistant designed to output JSON.";
PromptRequest requestModel = new PromptRequest(promptBo.getModel(),content,promptBo.getInput(),promptStr,true);
// 当为局部检索问答时需要设置section_qa参数
if (promptBo.getChatType() == 2 ){
MkProjectsVo mkProjectsVo = projectsService.queryById(projectId);
ObjectMapper objectMapper = new ObjectMapper();
List<String> keywords = objectMapper.readValue(mkProjectsVo.getKeywords(), new com.fasterxml.jackson.core.type.TypeReference<>() {
});
requestModel.setSection_qa(new SectionQa(keywords,mkProjectsVo.getDirection(),mkProjectsVo.getOffsets()));
}
ObjectMapper mapper = new ObjectMapper();
String requestStr;
try {
requestStr = mapper.writeValueAsString(requestModel);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return requestLLMStream(requestStr,promptBo,question,answer,think,requestURL);
}
private Flux<String> requestLLMStream(String requestStr,PromptBo promptBo,MkPromptsBo question,MkPromptsBo answer,MkPromptsBo think,String url){
HttpClient client = HttpClient.newHttpClient();
HttpRequest request;
try {
request = HttpRequest.newBuilder()
.uri(new URI(url))
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(requestStr))
.build();
} catch (Exception e) {
return Flux.create(sink -> {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"构建大模型请求出现错误!\"}");
sink.complete();
});
}
return Flux.create(sink -> {
CompletableFuture<HttpResponse<InputStream>> responseFuture = client.sendAsync(request, HttpResponse.BodyHandlers.ofInputStream());
responseFuture
.completeOnTimeout(null, 60, TimeUnit.SECONDS) // 60秒超时
.thenAccept(response -> {
if (response == null) {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"请求超时\"}");
sink.complete();
return;
}
int code = response.statusCode();
if (code != 200) {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"请求出现问题,请联系管理员!状态码:"+code+"\"}");
sink.complete();
return;
}
String idMessage = String.format("{\"type\":\"ids\",\"questionId\":\"%s\",\"answerId\":\"%s\"}\n\n", question.getId(), answer.getId());
sink.next(idMessage);
AtomicBoolean isClientDisconnected = new AtomicBoolean(false);
sink.onCancel(() -> {
isClientDisconnected.set(true);
});
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.body(), StandardCharsets.UTF_8))) {
String line;
StringBuilder output = new StringBuilder();
StringBuilder thinkString = new StringBuilder();
ObjectMapper mapper2 = new ObjectMapper();
HashMap<String,String> processedLines = new HashMap<>(); // 用于记录"model"
while ((line = reader.readLine()) != null && !isClientDisconnected.get()) {
if(!line.isBlank()){
line = line.replaceFirst("data:","");
if ("[DONE]".equals(line.trim())) {
break;
}
processJsonLine(processedLines,line, output, thinkString,mapper2);
}
sink.next(line);
}
// 完整答案拼接完成后,一次性入库
String fullAnswer = output.toString();
answer.setContent(fullAnswer);
answer.setModelType(processedLines.get("model"));
if(processedLines.containsKey("page_range")&&!processedLines.get("page_range").equals("null")){
answer.setPageRange(processedLines.get("page_range"));
}
if(promptBo.getModel().equals("deepseek-r1")||promptBo.getModel().equals("deepseek-r1-volc")) {
think.setContent(thinkString.toString());
think.setModelType(processedLines.get("model"));
think.setPageRange(processedLines.get("page_range"));
promptsService.updateByBo(think);
}
question.setModelType(processedLines.get("model"));
promptsService.updateByBo(answer);
promptsService.updateByBo(question);
if (!isClientDisconnected.get()) {
// 如果客户端没有断开连接,发送 [DONE] 信号
sink.next("[DONE]");
}
sink.complete();
} catch (Exception e) {
sink.error(new RuntimeException("{\"status\": \"ERROR\",\"error_msg\": \"处理响应时发生错误:"+e+"\"}"));
}
})
.exceptionally(e -> {
if (e instanceof IOException) {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"模型调用请求失败:"+ e.getMessage()+"\"}");
} else if (e instanceof InterruptedException) {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"请求模型连接被中断\"}");
} else {
sink.next("{\"status\": \"ERROR\",\"error_msg\": \"发生未知错误:"+ e.getMessage()+"\"}");
}
sink.complete();
return null;
});
});
}
@Override
public Boolean updateByBo(MkPromptsBo bo) {
MkPrompts update = MapstructUtils.convert(bo, MkPrompts.class);
validEntityBeforeSave(update);
return baseMapper.updateById(update) > 0;
}
-- public.mk_prompts definition
-- Drop table
-- DROP TABLE public.mk_prompts;
CREATE TABLE public.mk_prompts (
id bigserial NOT NULL, -- 流水号
guid varchar(50) NULL,
project_id int8 NULL, -- 项目表的id
"type" int4 NULL, -- 记录类型 枚举值:0-问题;1-答案
"content" text NULL, -- type=0时为问题内容,type=1时为回答内容
files_guid varchar(50) NULL, -- 对应文件guid
prompts_guid varchar(50) NULL, -- 对应问题的guid,type=1时有值
model_type varchar(200) NULL, -- 选择的模型
create_time date DEFAULT CURRENT_DATE NULL, -- 创建时间
update_time date DEFAULT CURRENT_DATE NULL, -- 更新时间
tmstamp varchar DEFAULT CURRENT_TIMESTAMP NULL, -- 时间戳
create_dept int8 NULL, -- 创建部门
create_by int8 NULL, -- 创建用户
update_by int8 NULL, -- 更新用户
page_range varchar(200) NULL, -- 关键词问答返回请求 {关键词: [1,2]} 比如:{"市净率":[0,1,2,3,4,5]}
CONSTRAINT mk_prompts_pk PRIMARY KEY (id)
);
-- Column comments
COMMENT ON COLUMN public.mk_prompts.id IS '流水号';
COMMENT ON COLUMN public.mk_prompts.project_id IS '项目表的id';
COMMENT ON COLUMN public.mk_prompts."type" IS '记录类型 枚举值:0-问题;1-答案';
COMMENT ON COLUMN public.mk_prompts."content" IS 'type=0时为问题内容,type=1时为回答内容';
COMMENT ON COLUMN public.mk_prompts.files_guid IS '对应文件guid';
COMMENT ON COLUMN public.mk_prompts.prompts_guid IS '对应问题的guid,type=1时有值';
COMMENT ON COLUMN public.mk_prompts.model_type IS '选择的模型';
COMMENT ON COLUMN public.mk_prompts.create_time IS '创建时间';
COMMENT ON COLUMN public.mk_prompts.update_time IS '更新时间';
COMMENT ON COLUMN public.mk_prompts.tmstamp IS '时间戳';
COMMENT ON COLUMN public.mk_prompts.create_dept IS '创建部门';
COMMENT ON COLUMN public.mk_prompts.create_by IS '创建用户';
COMMENT ON COLUMN public.mk_prompts.update_by IS '更新用户';
COMMENT ON COLUMN public.mk_prompts.page_range IS '关键词问答返回请求 {关键词: [1,2]} 比如:{"市净率":[0,1,2,3,4,5]}';
我本地启动时这个接口不会出现这个错误,一旦部署到服务器上就出现了这个错误而且就这一个接口,真的是太奇怪了,帮我看看是什么问题?