refactor(backend): incremental architecture evolution (Task 19)

- Add common/ layer for shared capabilities (LLM, RAG, document, middleware)
- Add legacy/ layer for existing business code
- Move files to new structure (controllers, routes, services)
- Update index.ts for new route registration
- System remains fully functional
This commit is contained in:
2025-11-16 15:42:44 +08:00
parent 8a17dc80ae
commit 0c5310fb77
39 changed files with 3904 additions and 353 deletions

View File

@@ -0,0 +1,150 @@
import axios from 'axios';
import { ILLMAdapter, Message, LLMOptions, LLMResponse, StreamChunk } from './types.js';
import { config } from '../../../config/env.js';
/**
 * LLM adapter for the DeepSeek chat-completions API (OpenAI-compatible wire format).
 * Supports both a blocking `chat` call and an SSE-based `chatStream` generator.
 */
export class DeepSeekAdapter implements ILLMAdapter {
  modelName: string;
  private apiKey: string;
  private baseURL: string;

  /**
   * @param modelName - DeepSeek model identifier; defaults to 'deepseek-chat'.
   * @throws Error when `config.deepseekApiKey` is missing or empty.
   */
  constructor(modelName: string = 'deepseek-chat') {
    this.modelName = modelName;
    this.apiKey = config.deepseekApiKey || '';
    this.baseURL = 'https://api.deepseek.com/v1';
    if (!this.apiKey) {
      throw new Error('DeepSeek API key is not configured');
    }
  }

  /**
   * Non-streaming chat completion.
   *
   * @param messages - Conversation history to send to the model.
   * @param options - Optional sampling parameters (temperature, maxTokens, topP).
   * @returns The model's full response with token usage and finish reason.
   * @throws Error with the upstream message when the HTTP call fails.
   */
  async chat(messages: Message[], options?: LLMOptions): Promise<LLMResponse> {
    try {
      const response = await axios.post(
        `${this.baseURL}/chat/completions`,
        {
          model: this.modelName,
          messages: messages,
          temperature: options?.temperature ?? 0.7,
          max_tokens: options?.maxTokens ?? 2000,
          top_p: options?.topP ?? 0.9,
          stream: false,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${this.apiKey}`,
          },
          // 180s timeout (3 minutes) — manuscript evaluation can take a long time.
          timeout: 180000,
        }
      );
      const choice = response.data.choices[0];
      return {
        content: choice.message.content,
        model: response.data.model,
        usage: {
          promptTokens: response.data.usage.prompt_tokens,
          completionTokens: response.data.usage.completion_tokens,
          totalTokens: response.data.usage.total_tokens,
        },
        finishReason: choice.finish_reason,
      };
    } catch (error: unknown) {
      console.error('DeepSeek API Error:', error);
      if (axios.isAxiosError(error)) {
        throw new Error(
          `DeepSeek API调用失败: ${error.response?.data?.error?.message || error.message}`
        );
      }
      throw error;
    }
  }

  /**
   * Streaming chat completion over Server-Sent Events.
   *
   * Yields one `StreamChunk` per SSE data line; the terminal chunk has
   * `done: true` and, when the server provides it, token `usage`.
   *
   * @param messages - Conversation history to send to the model.
   * @param options - Optional sampling parameters.
   * @param onChunk - Optional callback invoked with each chunk before it is yielded.
   * @throws Error with the upstream message when the HTTP call fails.
   */
  async *chatStream(
    messages: Message[],
    options?: LLMOptions,
    onChunk?: (chunk: StreamChunk) => void
  ): AsyncGenerator<StreamChunk, void, unknown> {
    try {
      const response = await axios.post(
        `${this.baseURL}/chat/completions`,
        {
          model: this.modelName,
          messages: messages,
          temperature: options?.temperature ?? 0.7,
          max_tokens: options?.maxTokens ?? 2000,
          top_p: options?.topP ?? 0.9,
          stream: true,
        },
        {
          headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${this.apiKey}`,
          },
          responseType: 'stream',
          timeout: 60000,
        }
      );

      const stream = response.data;
      let buffer = '';
      for await (const chunk of stream) {
        buffer += chunk.toString();
        // Keep the last (possibly partial) line in the buffer for the next chunk.
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';
        for (const line of lines) {
          const streamChunk = this.parseSSELine(line);
          if (streamChunk) {
            onChunk?.(streamChunk);
            yield streamChunk;
          }
        }
      }
      // FIX: previously a trailing SSE line without a final newline was
      // silently dropped when the stream ended; flush it here.
      if (buffer.trim()) {
        const streamChunk = this.parseSSELine(buffer);
        if (streamChunk) {
          onChunk?.(streamChunk);
          yield streamChunk;
        }
      }
    } catch (error: unknown) {
      console.error('DeepSeek Stream Error:', error);
      if (axios.isAxiosError(error)) {
        throw new Error(
          `DeepSeek流式调用失败: ${error.response?.data?.error?.message || error.message}`
        );
      }
      throw error;
    }
  }

  /**
   * Parse a single SSE line into a StreamChunk.
   *
   * @returns null for blank lines, the `[DONE]` sentinel, non-data lines,
   *          chunks with no choices (e.g. keep-alives), or malformed JSON
   *          (logged, not thrown — one bad line must not kill the stream).
   */
  private parseSSELine(line: string): StreamChunk | null {
    const trimmedLine = line.trim();
    if (!trimmedLine || trimmedLine === 'data: [DONE]' || !trimmedLine.startsWith('data: ')) {
      return null;
    }
    try {
      const data = JSON.parse(trimmedLine.slice(6));
      // FIX: guard against chunks with an empty/missing choices array, which
      // previously threw inside the loop and was misreported as a parse error.
      const choice = data.choices?.[0];
      if (!choice) {
        return null;
      }
      const streamChunk: StreamChunk = {
        content: choice.delta?.content || '',
        // FIX: treat any non-null finish_reason as terminal, not only 'stop' —
        // streams truncated with 'length'/'content_filter' previously never
        // yielded a done chunk.
        done: choice.finish_reason != null,
        model: data.model,
      };
      if (streamChunk.done && data.usage) {
        streamChunk.usage = {
          promptTokens: data.usage.prompt_tokens,
          completionTokens: data.usage.completion_tokens,
          totalTokens: data.usage.total_tokens,
        };
      }
      return streamChunk;
    } catch (parseError) {
      console.error('Failed to parse SSE data:', parseError);
      return null;
    }
  }
}