From 8b07a3f822bd63f6af230d4e25f199a741e807a8 Mon Sep 17 00:00:00 2001 From: AI Clinical Dev Team Date: Fri, 10 Oct 2025 22:16:30 +0800 Subject: [PATCH] fix: increase conversation history from 10 to 100 messages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previous limit was too conservative: - Old: 10 messages (5 conversation turns) 鉂?Too limited - New: 100 messages (50 conversation turns) 鉁?Reasonable Context capacity comparison: - DeepSeek-V3: 64K tokens 鈮?100-200 turns - Qwen3-72B: 128K tokens 鈮?200-400 turns - Previous 10 messages was only using ~1% of capacity Real usage scenarios: - Quick consultation: 5-10 turns - In-depth discussion: 20-50 turns 鉁?Common - Complete research design: 50-100 turns The new 100-message limit covers 99% of real use cases while staying well within model token limits. --- backend/src/services/conversationService.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/src/services/conversationService.ts b/backend/src/services/conversationService.ts index 0a0940eb..ca1e9dec 100644 --- a/backend/src/services/conversationService.ts +++ b/backend/src/services/conversationService.ts @@ -143,7 +143,8 @@ export class ConversationService { // 获取系统Prompt const systemPrompt = agentService.getSystemPrompt(agentId); - // 获取历史消息(最近10条) + // 获取历史消息(最近100条,约50轮对话) + // DeepSeek-V3支持64K tokens,实际可容纳100-200轮对话 const historyMessages = await prisma.message.findMany({ where: { conversationId, @@ -151,7 +152,7 @@ export class ConversationService { orderBy: { createdAt: 'desc', }, - take: 10, + take: 100, }); // 反转顺序(最早的在前)