feat(ssa): Complete T-test end-to-end testing with 9 bug fixes - Phase 1 core 85% complete. R service: missing value auto-filter. Backend: error handling, variable matching, dynamic filename. Frontend: module activation, session isolation, error propagation. Full flow verified.
Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
76
backend/src/common/llm/adapters/LLMFactory.js
Normal file
76
backend/src/common/llm/adapters/LLMFactory.js
Normal file
@@ -0,0 +1,76 @@
|
||||
import { DeepSeekAdapter } from './DeepSeekAdapter.js';
|
||||
import { QwenAdapter } from './QwenAdapter.js';
|
||||
import { GPT5Adapter } from './GPT5Adapter.js';
|
||||
import { ClaudeAdapter } from './ClaudeAdapter.js';
|
||||
/**
 * LLM factory.
 * Creates and caches one adapter instance per model type (singleton pattern),
 * so repeated requests for the same model reuse a single adapter.
 */

// Single source of truth for supported model identifiers. Keeping the list in
// one place prevents isSupported() and getSupportedModels() from drifting
// apart when models are added to the switch in getAdapter().
const SUPPORTED_MODELS = Object.freeze([
  'deepseek-v3',
  'qwen3-72b',
  'qwen-long',
  'gpt-5',
  'claude-4.5',
  'gemini-pro',
]);

export class LLMFactory {
  /**
   * Get an LLM adapter instance (singleton per model type).
   * @param {string} modelType - Model type identifier (see getSupportedModels()).
   * @returns {object} The cached or newly created adapter instance.
   * @throws {Error} If the model type is unsupported, or supported but not yet implemented.
   */
  static getAdapter(modelType) {
    // Return the cached instance if this adapter was already created.
    if (this.adapters.has(modelType)) {
      return this.adapters.get(modelType);
    }

    // Create the adapter matching the requested model type.
    let adapter;
    switch (modelType) {
      case 'deepseek-v3':
        adapter = new DeepSeekAdapter('deepseek-chat');
        break;
      case 'qwen3-72b':
        adapter = new QwenAdapter('qwen-max'); // qwen-max: Qwen's latest, strongest model
        break;
      case 'qwen-long':
        adapter = new QwenAdapter('qwen-long'); // 1M-token-context long-document model
        break;
      case 'gpt-5':
        adapter = new GPT5Adapter(); // Routed through the CloseAI proxy; defaults to gpt-5-pro
        break;
      case 'claude-4.5':
        adapter = new ClaudeAdapter('claude-sonnet-4-5-20250929'); // Routed through the CloseAI proxy
        break;
      case 'gemini-pro':
        // TODO: implement the Gemini adapter. The model is advertised via
        // getSupportedModels() but cannot be instantiated yet.
        throw new Error('Gemini adapter is not implemented yet');
      default:
        throw new Error(`Unsupported model type: ${modelType}`);
    }

    // Cache the instance so subsequent calls reuse it.
    this.adapters.set(modelType, adapter);
    return adapter;
  }

  /**
   * Clear cached adapter instances.
   * @param {string} [modelType] - If given, evict only that model's adapter;
   *   otherwise clear the entire cache.
   */
  static clearCache(modelType) {
    if (modelType) {
      this.adapters.delete(modelType);
    } else {
      this.adapters.clear();
    }
  }

  /**
   * Check whether a model type is supported.
   * @param {string} modelType - Model type identifier.
   * @returns {boolean} True if the model type is recognized.
   */
  static isSupported(modelType) {
    return SUPPORTED_MODELS.includes(modelType);
  }

  /**
   * Get all supported model types.
   * @returns {string[]} A fresh copy of the supported model list (safe to mutate).
   */
  static getSupportedModels() {
    return [...SUPPORTED_MODELS];
  }
}

// Adapter instance cache, keyed by model type.
LLMFactory.adapters = new Map();
|
||||
Reference in New Issue
Block a user