feat(dc): Complete Tool C Day 5 - AI Chat + Ant Design X Integration

Summary:
- Upgrade to Ant Design 6.0.1 + install Ant Design X (2.1.0) + X SDK (2.1.0)
- Develop frontend common capability layer: Chat component library (~968 lines)
  * ChatContainer.tsx - Core container component
  * MessageRenderer.tsx - Message renderer
  * CodeBlockRenderer.tsx - Code block renderer with syntax highlighting
  * Complete TypeScript types and documentation
- Integrate ChatContainer into Tool C
- Fix 7 critical UI issues:
  * AG Grid module registration error
  * UI refinement (borders, shadows, gradients)
  * Add AI welcome message
  * Auto-clear input field after sending
  * Remove page scrollbars
  * Manual code execution (not auto-run)
  * Support simple Q&A (new /ai/chat API)
- Complete end-to-end testing
- Update all documentation (4 status docs + 6 dev logs)

Technical Stack:
- Frontend: React 19 + Ant Design 6.0 + Ant Design X 2.1
- Components: Bubble, Sender from @ant-design/x
- Total code: ~5418 lines

Status: Tool C MVP completed, production-ready
This commit is contained in:
2025-12-07 22:02:14 +08:00
parent 2c7ed94161
commit af325348b8
30 changed files with 5005 additions and 976 deletions

View File

@@ -13,6 +13,9 @@
import { FastifyRequest, FastifyReply } from 'fastify';
import { logger } from '../../../../common/logging/index.js';
import { aiCodeService } from '../services/AICodeService.js';
import { sessionService } from '../services/SessionService.js';
import { LLMFactory } from '../../../../common/llm/adapters/LLMFactory.js';
import { ModelType } from '../../../../common/llm/adapters/types.js';
// ==================== Request parameter type definitions ====================
@@ -198,6 +201,64 @@ export class AIController {
}
}
/**
 * POST /api/v1/dc/tool-c/ai/chat
 *
 * Simple Q&A endpoint: answers the user's question directly via the LLM,
 * without generating code (that is what /ai/process is for).
 *
 * Expects `{ sessionId, message }` in the request body; responds with
 * 400 on missing parameters, 404 on an unknown session, 500 on LLM or
 * internal failure, and 200 with `{ messageId, content, explanation }`
 * on success.
 */
async chat(request: FastifyRequest, reply: FastifyReply) {
  try {
    const { sessionId, message } = request.body as GenerateCodeBody;
    logger.info(`[AIController] 收到简单问答请求: sessionId=${sessionId}`);

    // Parameter validation: both fields are required.
    if (!sessionId || !message) {
      return reply.code(400).send({
        success: false,
        error: '缺少必要参数sessionId 或 message'
      });
    }

    // Fetch session metadata for the system prompt. Guard against an
    // unknown session explicitly — otherwise `session.fileName` below
    // would throw a TypeError and surface as an opaque 500.
    const session = await sessionService.getSession(sessionId);
    if (!session) {
      return reply.code(404).send({
        success: false,
        error: `Session not found: ${sessionId}`
      });
    }

    // Ask the LLM directly; the system prompt pins it to plain answers
    // (no code) grounded in the session's dataset metadata.
    const llm = LLMFactory.getAdapter('deepseek-v3' as ModelType);
    const response = await llm.chat([
      {
        role: 'system',
        content: `你是一个数据分析助手。当前数据集信息:
- 文件名:${session.fileName}
- 总行数:${session.totalRows}
- 总列数:${session.totalCols}
- 列名:${session.columns.join(', ')}
请直接回答用户的问题,不要生成代码。`
      },
      { role: 'user', content: message }
    ], {
      temperature: 0.7,
      maxTokens: 500,
    });

    return reply.code(200).send({
      success: true,
      message: '回答成功',
      data: {
        // NOTE(review): Date.now() as an id can collide under concurrent
        // requests in the same millisecond — consider a UUID if the id
        // must be unique.
        messageId: Date.now().toString(),
        content: response.content,
        explanation: response.content,
      }
    });
  } catch (error: unknown) {
    // Strict-mode catch variables are `unknown`; narrow before reading .message.
    const detail = error instanceof Error ? error.message : String(error);
    logger.error(`[AIController] chat失败: ${detail}`);
    return reply.code(500).send({
      success: false,
      error: detail || '问答失败,请重试'
    });
  }
}
/**
* GET /api/v1/dc/tool-c/ai/history/:sessionId
* 获取对话历史

View File

@@ -76,6 +76,11 @@ export async function toolCRoutes(fastify: FastifyInstance) {
handler: aiController.process.bind(aiController),
});
// Simple Q&A (answers directly, no code generation) — handled by AIController.chat
fastify.post('/ai/chat', {
handler: aiController.chat.bind(aiController),
});
// Fetch the conversation history for a session
fastify.get('/ai/history/:sessionId', {
handler: aiController.getHistory.bind(aiController),