deploy: Complete 0126-27 deployment - database upgrade, service updates, code recovery

Major Changes:
- Database: Install pg_bigm/pgvector plugins, create test database
- Python service: v1.0 -> v1.1, add pymupdf4llm/openpyxl/pypandoc
- Node.js backend: v1.3 -> v1.7, fix pino-pretty and ES Module imports
- Frontend: v1.2 -> v1.3, skip TypeScript check for deployment
- Code recovery: Restore files that had become empty, using local backup

Technical Fixes:
- Fix pino-pretty error in production (conditional loading)
- Fix ES Module import paths (add .js extensions)
- Fix OSSAdapter TypeScript errors
- Update Prisma Schema (63 models, 16 schemas)
- Update environment variables (DATABASE_URL, EXTRACTION_SERVICE_URL, OSS)
- Remove deprecated variables (REDIS_URL, DIFY_API_URL, DIFY_API_KEY)

Documentation:
- Create 0126 deployment folder with 8 documents
- Update database development standards to v2.0
- Update SAE deployment status records

Deployment Status:
- PostgreSQL: ai_clinical_research_test with plugins
- Python: v1.1 @ 172.17.173.84:8000
- Backend: v1.7 @ 172.17.173.89:3001
- Frontend: v1.3 @ 172.17.173.90:80

Tested: All services running successfully on SAE
This commit is contained in:
2026-01-27 08:13:27 +08:00
parent 01a17f1e6f
commit 2481b786d8
318 changed files with 5290 additions and 3216 deletions

View File

@@ -7,8 +7,8 @@
import { FastifyReply } from 'fastify';
import { v4 as uuidv4 } from 'uuid';
import type { OpenAIStreamChunk, StreamOptions, THINKING_TAGS } from './types';
import { logger } from '../logging/logger';
import type { OpenAIStreamChunk, StreamOptions, THINKING_TAGS } from './types.js';
import { logger } from '../logging/logger.js';
/**
* OpenAI 流式响应适配器
@@ -210,3 +210,5 @@ export function createOpenAIStreamAdapter(

View File

@@ -6,11 +6,11 @@
*/
import { FastifyReply } from 'fastify';
import { OpenAIStreamAdapter, createOpenAIStreamAdapter } from './OpenAIStreamAdapter';
import { StreamOptions, StreamCallbacks, THINKING_TAGS, OpenAIMessage } from './types';
import { LLMFactory } from '../llm/adapters/LLMFactory';
import type { Message as LLMMessage } from '../llm/adapters/types';
import { logger } from '../logging/logger';
import { OpenAIStreamAdapter, createOpenAIStreamAdapter } from './OpenAIStreamAdapter.js';
import { StreamOptions, StreamCallbacks, THINKING_TAGS, OpenAIMessage } from './types.js';
import { LLMFactory } from '../llm/adapters/LLMFactory.js';
import type { Message as LLMMessage } from '../llm/adapters/types.js';
import { logger } from '../logging/logger.js';
/**
* 深度思考标签处理结果
@@ -216,3 +216,5 @@ export async function streamChat(

View File

@@ -5,8 +5,8 @@
* 支持 Ant Design X 的 XRequest 直接消费
*/
export { OpenAIStreamAdapter, createOpenAIStreamAdapter } from './OpenAIStreamAdapter';
export { StreamingService, createStreamingService, streamChat } from './StreamingService';
export { OpenAIStreamAdapter, createOpenAIStreamAdapter } from './OpenAIStreamAdapter.js';
export { StreamingService, createStreamingService, streamChat } from './StreamingService.js';
export type {
OpenAIMessage,
@@ -14,9 +14,11 @@ export type {
StreamOptions,
StreamCallbacks,
SSEEventType,
} from './types';
} from './types.js';
export { THINKING_TAGS } from './types.js';
export { THINKING_TAGS } from './types';

View File

@@ -109,3 +109,5 @@ export type SSEEventType =