
v0.0.3 - split the file upload interface

WangXuMing 3 months ago
parent
commit
ae0f1a1373

+ 4 - 2
.gitignore

@@ -48,7 +48,9 @@ coverage.xml
 # Translations
 *.mo
 *.pot
-
+*.pdf
+*.docx
+*.doc
 # Django stuff:
 *.log
 langfuse/
@@ -62,4 +64,4 @@ todo.md
 .design
 .claude
 .R&D
-temp\AI审查结果.json
+temp\AI审查结果.json

+ 16 - 0
core/base/workflow_manager.py

@@ -230,6 +230,14 @@ class WorkflowManager:
             # Notify SSE connections that the task is complete
             asyncio.run(self.progress_manager.complete_task(task_chain.callback_task_id))
 
+            # Clean up the Redis file cache
+            try:
+                from foundation.utils.redis_utils import delete_file_info
+                asyncio.run(delete_file_info(task_chain.file_id))
+                logger.info(f"Cleaned Redis file cache: {task_chain.file_id}")
+            except Exception as e:
+                logger.warning(f"Failed to clean Redis file cache: {str(e)}")
+
             logger.info(f"Document processing task chain complete: {task_chain.callback_task_id}")
             return task_chain.results
 
@@ -240,6 +248,14 @@ class WorkflowManager:
             # Clean up the task registration
             asyncio.run(self.redis_duplicate_checker.unregister_task(task_chain.file_id))
 
+            # Clean up the Redis file cache (even on failure)
+            try:
+                from foundation.utils.redis_utils import delete_file_info
+                asyncio.run(delete_file_info(task_chain.file_id))
+                logger.info(f"Cleaned Redis file cache: {task_chain.file_id}")
+            except Exception as cleanup_error:
+                logger.warning(f"Failed to clean Redis file cache: {str(cleanup_error)}")
+
             # Notify SSE connections that the task failed
             error_result = {
                 "error": str(e),

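The cleanup block is now duplicated in the success and failure paths; a small helper would keep the two call sites in sync. A minimal sketch (hypothetical helper name, reusing imports already present in this diff):

import asyncio

from foundation.utils.redis_utils import delete_file_info
from foundation.logger.loggering import server_logger as logger

def cleanup_file_cache(file_id: str) -> None:
    """Best-effort removal of the Redis file cache; never raises."""
    try:
        asyncio.run(delete_file_info(file_id))
        logger.info(f"Cleaned Redis file cache: {file_id}")
    except Exception as e:
        # Cleanup must never mask the task result
        logger.warning(f"Failed to clean Redis file cache: {str(e)}")
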
+ 15 - 0
foundation/base/celery_app.py

@@ -47,6 +47,17 @@ app.conf.update(
     worker_concurrency=2,          # processes per worker (document processing is heavy; keep this low)
     worker_pool='solo',           # single-threaded pool (avoids GIL issues)
 
+    # Network and connection settings - prevent the 30-minute disconnects
+    broker_connection_timeout=30,      # connection timeout, 30 s
+    broker_connection_retry=True,      # enable connection retries
+    broker_connection_retry_on_startup=True,  # retry on startup
+    broker_connection_max_retries=10,  # maximum retry count
+    broker_heartbeat=60,               # heartbeat interval 60 s (twice the 30 s default)
+    broker_transport_options={
+        'visibility_timeout': 3600,    # task visibility timeout
+        'socket_keepalive': True,      # enable socket keepalive
+    },
+
     # Task settings
     task_track_started=True,
     task_time_limit=600,           # 10-minute timeout (document processing is slow)
@@ -55,6 +66,10 @@
 
     # Result expiry
     result_expires=3600,           # expire after 1 hour
+
+    # Connection pool settings
+    broker_pool_limit=None,        # no broker connection pool limit
+    result_backend_pool_limit=None, # no result-backend pool limit
 )
 
 # Initialize the Celery trace system
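
A quick way to sanity-check that the new connection settings took effect is to read them back from the configured app; a minimal sketch (module path as in this repo):

from foundation.base.celery_app import app

# app.conf reflects everything merged via app.conf.update(...)
assert app.conf.broker_heartbeat == 60
assert app.conf.broker_connection_max_retries == 10
assert app.conf.broker_transport_options['visibility_timeout'] == 3600
print("broker pool limit:", app.conf.broker_pool_limit)  # -> None (unlimited)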

+ 1 - 1
foundation/base/redis_config.py

@@ -30,7 +30,7 @@ class RedisConfig:
 def load_config_from_env() -> tuple[RedisConfig]:
     """从环境变量加载配置"""
     redis_config = RedisConfig(
-        url=config_handler.get("redis", "REDIS_URL", "redis://127.0.0.1:6379"),
+        url=config_handler.get("redis", "REDIS_URL"),
         password=config_handler.get("redis", "REDIS_PASSWORD"),
         db=int(config_handler.get("redis", "REDIS_DB", "0")),
         max_connections=int(config_handler.get("redis", "REDIS_MAX_CONNECTIONS", "50"))

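Dropping the hard-coded default means a missing REDIS_URL now surfaces only at connect time. A guard sketch, assuming config_handler.get returns None for absent keys:

url = config_handler.get("redis", "REDIS_URL")
if not url:
    # Fail fast with a clear message instead of letting from_url(None) error later
    raise RuntimeError("REDIS_URL is not set; configure it before starting the service")
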
+ 121 - 10
foundation/base/redis_connection.py

@@ -8,17 +8,77 @@
 @Date       :2025/7/21 15:07
 '''
 import redis                     # 同步专用
-from redis import asyncio as aioredis
-
+# Try to import the async Redis module
+try:
+    from redis import asyncio as redis_asyncio
+except ImportError:
+    try:
+        import aioredis as redis_asyncio
+    except ImportError:
+        raise ImportError("Neither redis.asyncio nor aioredis is available. Please install 'redis[asyncio]' or 'aioredis'")
+
+# Import the Redis exception class
+from redis.exceptions import ConnectionError as redis_ConnectionError
 
 from typing import Optional, Protocol, Dict, Any
+from functools import wraps
+import asyncio
 from foundation.base.redis_config import RedisConfig
 from foundation.base.redis_config import load_config_from_env
 from foundation.logger.loggering import server_logger
-from typing import Dict, Any, List
-from typing import Tuple, Optional
+from typing import Dict, Any, List, Tuple
 from langchain_community.storage import RedisStore
 
+
+def with_redis_retry(max_retries: int = 3, delay: float = 1.0):
+    """
+    Reconnect-and-retry decorator for Redis operations
+
+    Args:
+        max_retries: maximum number of retries, default 3
+        delay: seconds between retries, default 1
+    """
+    def decorator(func):
+        @wraps(func)
+        async def wrapper(self, *args, **kwargs):
+            last_exception = None
+
+            for attempt in range(max_retries + 1):  # +1 to include the initial attempt
+                try:
+                    return await func(self, *args, **kwargs)
+                except (ConnectionResetError, redis_ConnectionError) as e:
+                    last_exception = e
+
+                    if attempt < max_retries:
+                        server_logger.warning(
+                            f"Redis connection error (attempt {attempt + 1}/{max_retries + 1}): {str(e)}"
+                        )
+
+                        # Attempt to reconnect
+                        try:
+                            await self._reconnect()
+                        except Exception as reconnect_error:
+                            server_logger.error(f"Redis reconnect failed: {str(reconnect_error)}")
+                            # If reconnecting failed, keep retrying
+                            await asyncio.sleep(delay * (attempt + 1))  # linearly increasing backoff
+                            continue
+
+                        server_logger.info("Redis reconnected, retrying the operation")
+                        await asyncio.sleep(delay)  # give the connection a moment to settle
+                    else:
+                        server_logger.error(f"Redis operation failed after reaching the retry limit: {str(e)}")
+                        break
+                except Exception:
+                    # Non-connection exceptions propagate immediately
+                    raise
+
+            # All retries failed
+            raise last_exception
+
+        return wrapper
+    return decorator
+
+
 class RedisConnection(Protocol):
     """
     Redis interface protocol
@@ -64,23 +124,34 @@ class RedisAdapter(RedisConnection):
 
     async def connect(self):
         """创建Redis连接"""
-        self._redis = await aioredis.from_url(
+        # 简化的TCP Keep-Alive配置(兼容Windows系统)
+        socket_options = {
+            'socket_keepalive': True,
+            'socket_connect_timeout': 10,  # 连接超时10秒
+            'socket_timeout': 30,           # 读写超时30秒
+        }
+
+        # 使用新版本的redis.asyncio
+        self._redis = redis_asyncio.from_url(
             self.config.url,
             password=self.config.password,
             db=self.config.db,
             encoding="utf-8",
             decode_responses=True,
-            max_connections=self.config.max_connections
+            max_connections=self.config.max_connections,
+            **socket_options
         )
-        # 用于 langchain RedisStore 存储  
+
+        # 用于 langchain RedisStore 存储
         # 必须设为 False(LangChain 需要 bytes 数据)
-        self._langchain_redis_client = aioredis.from_url(
+        self._langchain_redis_client = redis_asyncio.from_url(
             self.config.url,
             password=self.config.password,
             db=self.config.db,
             encoding="utf-8",
             decode_responses=False,
-            max_connections=self.config.max_connections
+            max_connections=self.config.max_connections,
+            **socket_options
         )
        
         # ✅ 使用同步 Redis 客户端
@@ -100,44 +171,84 @@ class RedisAdapter(RedisConnection):
       
         return self
 
+    @with_redis_retry()
     async def get(self, key: str) -> Any:
+        """获取Redis键值"""
         return await self._redis.get(key)
 
+    @with_redis_retry()
     async def set(self, key: str, value: Any, ex: Optional[int] = None, nx: bool = False) -> bool:
+        """设置Redis键值"""
         return await self._redis.set(key, value, ex=ex, nx=nx)
 
+    @with_redis_retry()
+    async def setex(self, key: str, time: int, value: Any) -> bool:
+        """设置Redis键值并指定过期时间"""
+        return await self._redis.setex(key, time, value)
+
+    @with_redis_retry()
     async def hget(self, key: str, field: str) -> Any:
         return await self._redis.hget(key, field)
 
+    @with_redis_retry()
     async def hset(self, key: str, field: str, value: Any) -> int:
         return await self._redis.hset(key, field, value)
 
+    @with_redis_retry()
     async def hmset(self, key: str, mapping: Dict[str, Any]) -> bool:
         return await self._redis.hmset(key, mapping)
 
+    @with_redis_retry()
     async def hgetall(self, key: str) -> Dict[str, Any]:
         return await self._redis.hgetall(key)
 
+    @with_redis_retry()
     async def delete(self, *keys: str) -> int:
         return await self._redis.delete(*keys)
 
+    @with_redis_retry()
     async def exists(self, key: str) -> int:
         return await self._redis.exists(key)
 
+    @with_redis_retry()
     async def expire(self, key: str, seconds: int) -> bool:
         return await self._redis.expire(key, seconds)
 
+    @with_redis_retry()
     async def scan(self, cursor: int, match: Optional[str] = None, count: Optional[int] = None) -> tuple[
         int, list[str]]:
         return await self._redis.scan(cursor, match=match, count=count)
-    
+
+    @with_redis_retry()
     async def eval(self, script: str, numkeys: int, *keys_and_args: str) -> Any:
+        """执行Redis脚本"""
         return await self._redis.eval(script, numkeys, *keys_and_args) #  解包成独立参数
 
 
     def get_langchain_redis_client(self):
         return self._langchain_redis_client
 
+    async def _reconnect(self) -> None:
+        """Reconnect to Redis"""
+        try:
+            server_logger.info("Reconnecting to Redis...")
+            if self._redis:
+                await self._redis.close()
+                # wait_closed() only exists on the legacy aioredis client
+                if hasattr(self._redis, "wait_closed"):
+                    await self._redis.wait_closed()
+            if self._langchain_redis_client:
+                await self._langchain_redis_client.close()
+                if hasattr(self._langchain_redis_client, "wait_closed"):
+                    await self._langchain_redis_client.wait_closed()
+
+            # Brief pause before reconnecting
+            await asyncio.sleep(1)
+
+            # Re-establish the connections
+            await self.connect()
+            server_logger.info("Redis reconnected")
+        except Exception as e:
+            server_logger.error(f"Redis reconnect failed: {str(e)}")
+            raise
+
     async def close(self) -> None:
         if self._redis:
             await self._redis.close()

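The decorator works with any async method whose instance exposes a _reconnect() coroutine; a standalone usage sketch with a hypothetical client (not part of the codebase):

import asyncio

from foundation.base.redis_connection import with_redis_retry

class FakeClient:
    """Hypothetical client illustrating the with_redis_retry contract."""
    def __init__(self):
        self.calls = 0

    async def _reconnect(self) -> None:
        pass  # the real adapter tears down and rebuilds its connections here

    @with_redis_retry(max_retries=2, delay=0.1)
    async def flaky_get(self, key: str) -> str:
        self.calls += 1
        if self.calls < 2:
            raise ConnectionResetError("simulated connection drop")
        return f"value-of-{key}"

print(asyncio.run(FakeClient().flaky_get("k")))  # -> value-of-k, after one retry
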
+ 1 - 1
foundation/rag/vector/milvus_vector.py

@@ -1,6 +1,6 @@
 import time
 from pymilvus import connections, Collection, FieldSchema, CollectionSchema, DataType, utility
-from sentence_transformers import SentenceTransformer
+# from sentence_transformers import SentenceTransformer
 import numpy as np
 from typing import List, Dict, Any, Optional
 import json

+ 311 - 10
foundation/utils/redis_utils.py

@@ -1,16 +1,21 @@
 
 import json
+import time
+import asyncio
+import sys
+from pathlib import Path
+# root_dir = Path(__file__).parent.parent.parent 
+# print(root_dir) 
+# sys.path.append(str(root_dir))  
+from typing import Dict, Optional, Any
+from .time_statistics import track_execution_time
+from foundation.base.config import config_handler
 from foundation.logger.loggering import server_logger
 from foundation.base.redis_connection import RedisConnectionFactory
-from foundation.base.config import config_handler
 # Cache data TTL, default 3 minutes
 CACHE_DATA_EXPIRED_TIME = 3 * 60
 
 
-
-
-
-
 async def set_redis_result_cache_data(data_type: str , trace_id: str, value: str):
     """
       Set Redis result-cache data
@@ -24,9 +29,6 @@ async def set_redis_result_cache_data(data_type: str , trace_id: str, value: str
     redis_store = await RedisConnectionFactory.get_redis_store()
     await redis_store.set(key, value , ex=expired_time) 
 
-
-
-
 async def get_redis_result_cache_data(data_type: str , trace_id: str):
     """
       Get Redis result-cache data
@@ -41,7 +43,6 @@ async def get_redis_result_cache_data(data_type: str , trace_id: str):
     return value
 
 
-
 async def get_redis_result_cache_data_and_delete_key(data_type: str , trace_id: str):
     """
       Get Redis result-cache data and delete the key
@@ -61,4 +62,304 @@ async def get_redis_result_cache_data_and_delete_key(data_type: str , trace_id:
     data = json.loads(json_str)
     # Delete the key
     #await redis_store.delete(key)
-    return data
+    return data
+
+
+@track_execution_time
+async def _store_file_chunked(file_id: str, file_content: bytes, chunk_size: int = 1024*1024, expire_seconds: int = 3600) -> bool:
+    """
+    Store large file content in chunks (internal helper)
+    """
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+        file_size = len(file_content)
+
+        server_logger.info(f"Starting chunked store: {file_id}, size: {file_size/1024/1024:.2f}MB, chunk size: {chunk_size/1024/1024:.2f}MB")
+
+        # Number of chunks (ceiling division)
+        chunk_count = (file_size + chunk_size - 1) // chunk_size
+
+        # Build the chunk index record
+        chunk_index = {
+            'file_id': file_id,
+            'file_size': file_size,
+            'chunk_size': chunk_size,
+            'chunk_count': chunk_count,
+            'created_at': int(time.time())
+        }
+
+        # Store the chunk index
+        await redis_store.setex(f"chunks:{file_id}", expire_seconds, json.dumps(chunk_index))
+
+        # Store the file content chunk by chunk
+        tasks = []
+        for i in range(chunk_count):
+            start = i * chunk_size
+            end = min(start + chunk_size, file_size)
+            chunk_data = file_content[start:end]
+            chunk_key = f"chunk:{file_id}:{i}"
+            task = redis_store.setex(chunk_key, expire_seconds, chunk_data)
+            tasks.append(task)
+
+        # Write all chunks concurrently
+        await asyncio.gather(*tasks)
+
+        server_logger.info(f"Chunked store complete: {file_id}, {chunk_count} chunks")
+        return True
+
+    except Exception as e:
+        server_logger.error(f"Chunked store failed: {file_id}, {str(e)}")
+        return False
+
+
+async def _get_file_chunked(file_id: str) -> Optional[bytes]:
+    """
+    Reassemble large file content from its chunks (internal helper)
+    """
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+
+        # Fetch the chunk index
+        chunk_index_json = await redis_store.get(f"chunks:{file_id}")
+        if not chunk_index_json:
+            server_logger.warning(f"Chunk index not found for file: {file_id}")
+            return None
+
+        # json.loads accepts both str and bytes, so this works with either decode_responses setting
+        chunk_index = json.loads(chunk_index_json)
+        chunk_count = chunk_index['chunk_count']
+
+        # Fetch all chunks concurrently
+        tasks = []
+        for i in range(chunk_count):
+            chunk_key = f"chunk:{file_id}:{i}"
+            task = redis_store.get(chunk_key)
+            tasks.append(task)
+
+        # Run the reads concurrently
+        chunks = await asyncio.gather(*tasks)
+
+        # Reassemble the file content
+        file_content = b''.join(chunks)
+        return file_content
+
+    except Exception as e:
+        server_logger.error(f"Failed to fetch chunked file: {file_id}, {str(e)}")
+        return None
+
+
+async def _delete_file_chunks(file_id: str) -> bool:
+    """
+    Delete a large file's chunks (internal helper)
+    """
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+
+        # Fetch the chunk index
+        chunk_index_json = await redis_store.get(f"chunks:{file_id}")
+        if not chunk_index_json:
+            return True  # probably already deleted
+
+        chunk_index = json.loads(chunk_index_json)
+        chunk_count = chunk_index['chunk_count']
+
+        # Build the full list of keys to delete
+        keys_to_delete = [f"chunks:{file_id}"]
+        keys_to_delete.extend([f"chunk:{file_id}:{i}" for i in range(chunk_count)])
+
+        # Delete in one batch
+        await redis_store.delete(*keys_to_delete)
+        return True
+
+    except Exception as e:
+        server_logger.error(f"Failed to delete file chunks: {file_id}, {str(e)}")
+        return False
+
+
+@track_execution_time
+async def store_file_info(file_id: str, file_info: Dict[str, Any], expire_seconds: int = 3600) -> bool:
+    """
+    Store file info (auto-optimized: small files stored directly, large files chunked)
+
+    Args:
+        file_id: file ID
+        file_info: file info dict
+        expire_seconds: expiry in seconds, default 1 hour
+
+    Returns:
+        bool: whether the store succeeded
+    """
+    # Direct-storage switch: True stores content directly, False allows chunked storage
+    direct_storage = True
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+
+        # Skip if already stored, to avoid duplicate writes
+        existing_meta = await redis_store.get(f"meta:{file_id}")
+        if existing_meta:
+            server_logger.info(f"File info already cached, skipping store: {file_id}")
+            return True
+
+        # Extract the file content
+        file_content = file_info.get('file_content')
+
+        if file_content:
+            file_size = len(file_content)
+            chunk_threshold = 50 * 1024 * 1024  # 50 MB threshold
+
+            # Choose a storage strategy based on file size and the override switch
+            if direct_storage or file_size <= chunk_threshold:
+                server_logger.info(f"Using direct storage: {file_id}, {file_size/1024/1024:.2f}MB")
+
+                # Store directly; mark as 'direct' so get_file_info can find the content
+                metadata = {k: v for k, v in file_info.items() if k != 'file_content'}
+                metadata['storage_type'] = 'direct'
+                metadata['file_size'] = file_size
+
+                # Store metadata and content concurrently for speed
+                tasks = [
+                    redis_store.setex(f"meta:{file_id}", expire_seconds, json.dumps(metadata)),
+                    redis_store.setex(f"content:{file_id}", expire_seconds, file_content)
+                ]
+                await asyncio.gather(*tasks)
+
+            else:
+                server_logger.info(f"Using chunked storage: {file_id}, {file_size/1024/1024:.2f}MB > 50MB")
+
+                # Store the file content in chunks
+                chunk_success = await _store_file_chunked(file_id, file_content, expire_seconds=expire_seconds)
+                if not chunk_success:
+                    return False
+
+                # Store the metadata (without the file content)
+                metadata = {k: v for k, v in file_info.items() if k != 'file_content'}
+                metadata['storage_type'] = 'chunked'
+                metadata['file_size'] = file_size
+
+                await redis_store.setex(f"meta:{file_id}", expire_seconds, json.dumps(metadata))
+        else:
+            # No file content; store metadata only
+            metadata = file_info.copy()
+            metadata['storage_type'] = 'metadata_only'
+            await redis_store.setex(f"meta:{file_id}", expire_seconds, json.dumps(metadata))
+
+        server_logger.info(f"File info stored in Redis: {file_id}")
+        return True
+
+    except Exception as e:
+        server_logger.error(f"Failed to store file info in Redis: {str(e)}")
+        return False
+
+@track_execution_time
+async def get_file_info(file_id: str, include_content: bool = True) -> Optional[Dict[str, Any]]:
+    """
+    Fetch file info by file_id (handles both chunked and direct storage)
+
+    Args:
+        file_id: file ID
+        include_content: whether to include the file content (default True); pass False for efficiency
+
+    Returns:
+        Dict: file info dict, or None if it does not exist
+    """
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+
+        # Fetch the metadata
+        meta_key = f"meta:{file_id}"
+        meta_bytes = await redis_store.get(meta_key)
+
+        if not meta_bytes:
+            server_logger.warning(f"File metadata not found: {meta_key}")
+            return None
+
+        # Parse the metadata (json.loads accepts str or bytes)
+        file_info = json.loads(meta_bytes)
+        storage_type = file_info.get('storage_type', 'direct')
+
+        # Fetch the file content according to the storage type
+        if include_content and 'file_size' in file_info:
+            if storage_type == 'chunked':
+                # Reassemble the content from its chunks
+                file_content = await _get_file_chunked(file_id)
+                if file_content:
+                    file_info['file_content'] = file_content
+                else:
+                    server_logger.warning(f"Failed to fetch chunked file content: {file_id}")
+            elif storage_type == 'direct':
+                # Fetch the content directly
+                content_key = f"content:{file_id}"
+                file_content = await redis_store.get(content_key)
+                if file_content:
+                    file_info['file_content'] = file_content
+                else:
+                    server_logger.warning(f"File content not found: {content_key}")
+
+        server_logger.info(f"Fetched file info from Redis: {meta_key}, storage type: {storage_type}")
+        return file_info
+
+    except json.JSONDecodeError as e:
+        server_logger.error(f"Failed to parse file metadata JSON: {str(e)}")
+        return None
+    except Exception as e:
+        server_logger.error(f"Failed to fetch file info: {str(e)}")
+        return None
+
+@track_execution_time
+async def delete_file_info(file_id: str) -> bool:
+    """
+    Delete file info (handles both chunked and direct storage)
+
+    Args:
+        file_id: file ID
+
+    Returns:
+        bool: whether the delete succeeded
+    """
+    try:
+        redis_store = await RedisConnectionFactory.get_redis_store()
+
+        # Fetch the metadata to determine the storage type
+        meta_key = f"meta:{file_id}"
+        meta_bytes = await redis_store.get(meta_key)
+
+        if not meta_bytes:
+            server_logger.warning(f"File metadata not found: {meta_key}")
+            return True  # probably already deleted
+
+        # Parse the metadata (json.loads accepts str or bytes)
+        file_info = json.loads(meta_bytes)
+        storage_type = file_info.get('storage_type', 'direct')
+
+        # Delete the content according to the storage type
+        deleted_count = 0
+
+        # Delete the metadata
+        deleted_count += await redis_store.delete(meta_key)
+
+        if storage_type == 'chunked':
+            # Delete the chunked content
+            chunk_success = await _delete_file_chunks(file_id)
+            if chunk_success:
+                server_logger.info(f"Deleted chunked file content: {file_id}")
+        elif storage_type == 'direct':
+            # Delete the directly stored content
+            content_key = f"content:{file_id}"
+            deleted_count += await redis_store.delete(content_key)
+
+        if deleted_count > 0:
+            server_logger.info(f"Deleted file info: {file_id}, {deleted_count} keys")
+            return True
+        else:
+            server_logger.warning(f"No Redis cache entry to delete: {file_id}")
+            return False
+
+    except json.JSONDecodeError as e:
+        server_logger.error(f"Failed to parse file metadata JSON: {str(e)}")
+        return False
+    except Exception as e:
+        server_logger.error(f"Failed to delete file info: {str(e)}")
+        return False
+
+#asyncio.run(delete_file_info('e385049cde7d21a48c7de216182f0f23'))
+

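A round-trip sketch of the three public helpers (made-up file_id and payload, for illustration only):

import asyncio

from foundation.utils.redis_utils import store_file_info, get_file_info, delete_file_info

async def demo():
    file_id = "0123456789abcdef0123456789abcdef"   # hypothetical MD5-style ID
    info = {
        "file_id": file_id,
        "file_name": "plan.pdf",
        "file_content": b"%PDF-1.4 ...",           # small payload -> direct storage path
    }
    assert await store_file_info(file_id, info, expire_seconds=60)
    cached = await get_file_info(file_id)           # metadata plus file_content
    print(cached["file_name"], cached["file_size"])
    await delete_file_info(file_id)                 # removes the meta: and content: keys

asyncio.run(demo())
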
+ 22 - 5
foundation/utils/time_statistics.py

@@ -1,21 +1,38 @@
 import time
+import asyncio
+import inspect
 from functools import wraps
 from ..logger.loggering import server_logger as logger
 
 def track_execution_time(func):
     """
     Decorator that tracks a function's execution time and logs it
-    Logs start, completion, and duration (two decimal places)
+    Supports both sync and async functions; logs start, completion, and duration (two decimal places)
     """
     @wraps(func)
-    def wrapper(*args, **kwargs):
+    def sync_wrapper(*args, **kwargs):
         logger.info(f"[{func.__name__}] started")
         start_time = time.perf_counter()
-        
+
         try:
             return func(*args, **kwargs)
         finally:
             duration = time.perf_counter() - start_time
             logger.info(f"[{func.__name__}] finished in {duration:.2f} s")
-    
-    return wrapper
+
+    @wraps(func)
+    async def async_wrapper(*args, **kwargs):
+        logger.info(f"[{func.__name__}] started")
+        start_time = time.perf_counter()
+
+        try:
+            return await func(*args, **kwargs)
+        finally:
+            duration = time.perf_counter() - start_time
+            logger.info(f"[{func.__name__}] finished in {duration:.2f} s")
+
+    # Return the wrapper matching the function type
+    if inspect.iscoroutinefunction(func):
+        return async_wrapper
+    else:
+        return sync_wrapper
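
Usage sketch showing the dual dispatch; both paths log through server_logger as wired above:

import asyncio

@track_execution_time
def crunch(n: int) -> int:
    return sum(range(n))             # sync function -> sync_wrapper

@track_execution_time
async def fetch() -> str:
    await asyncio.sleep(0.1)         # coroutine function -> async_wrapper
    return "done"

crunch(1_000_000)
asyncio.run(fetch())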

+ 8 - 3
requirements.txt

@@ -81,7 +81,7 @@ markdown-it-py==3.0.0
 marshmallow==3.26.1
 mcp==1.10.1
 mdurl==0.1.2
-milvus-lite==2.5.1
+#milvus-lite==2.5.1
 mmh3==5.1.0
 mpmath==1.3.0
 multidict==6.6.3
@@ -164,7 +164,7 @@ ujson==5.10.0
 urllib3==2.5.0
 uv==0.7.20
 uvicorn==0.35.0
-uvloop==0.21.0
+#uvloop==0.21.0
 watchfiles==1.1.0
 websocket-client==1.8.0
 websockets==15.0.1
@@ -181,7 +181,12 @@ langgraph-checkpoint-postgres==2.0.23
 langgraph-checkpoint-redis==0.0.8
 langchain-redis==0.2.3
 aiomysql==0.3.2
-celery=5.5.3
+celery==5.5.3
 pypdf==6.2.0
 grandalf==0.8
+psycopg2-binary==2.9.11
+pgvector==0.4.1
+pymilvus==2.5.11
+
+
 

The diff view for this file has been limited because it is too large
+ 12 - 3
temp/AI审查结果.json


+ 18 - 19
views/construction_review/app.py

@@ -3,31 +3,26 @@
 Aggregates all endpoints into a single test service
 """
 
-import datetime
 import sys
-import os
-import threading
-import subprocess
 import time
-from multiprocessing import Process
-
-# Add the project root to the Python path
-current_dir = os.path.dirname(os.path.abspath(__file__))
-project_root = os.path.dirname(os.path.dirname(current_dir))
-sys.path.insert(0, project_root)
+import uvicorn
+import datetime
+import threading
+from pathlib import Path
+root_dir = Path(__file__).parent.parent.parent
+sys.path.append(str(root_dir))
 
-# Now the imports resolve correctly
-from foundation.logger.loggering import server_logger as logger
-from foundation.base.celery_app import app as celery_app
 from fastapi import FastAPI, HTTPException
-from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import JSONResponse
-import uvicorn
+from fastapi.middleware.cors import CORSMiddleware
+from foundation.base.celery_app import app as celery_app
+from foundation.logger.loggering import server_logger as logger
+
 
-# Now the imports resolve correctly
 from views.construction_review.file_upload import file_upload_router
 from views.construction_review.task_progress import task_progress_router
 from views.construction_review.review_results import review_results_router
+from views.construction_review.launch_review import launch_review_router
 
 def create_app() -> FastAPI:
     """创建接口服务"""
@@ -50,6 +45,7 @@ def create_app() -> FastAPI:
     app.include_router(file_upload_router)
     app.include_router(task_progress_router)
     app.include_router(review_results_router)
+    app.include_router(launch_review_router)
 
     # 全局异常处理
     @app.exception_handler(HTTPException)
@@ -90,6 +86,12 @@ def create_app() -> FastAPI:
                     "method": "POST",
                     "description": "上传施工方案文档"
                 },
+                {
+                    "name": "Launch review",
+                    "path": "/sgsc/sse/launch_review",
+                    "method": "POST",
+                    "description": "Start the AI review workflow"
+                },
                 {
                     "name": "进度查询",
                     "path": "/sgsc/task_progress/{callback_task_id}",
@@ -125,9 +127,6 @@ class CeleryWorkerManager:
             return True
 
         try:
-            # Import the Celery app
-            from foundation.base.celery_app import app as celery_app
-
             # Define the worker function
             def run_celery_worker():
                 try:

+ 67 - 55
views/construction_review/file_upload.py

@@ -8,14 +8,14 @@ import uuid
 import time
 from datetime import datetime
 
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from typing import Optional,List
 from foundation.utils import md5
 from foundation.base.config import config_handler
 from .schemas.error_schemas import FileUploadErrors
 from core.base.workflow_manager import WorkflowManager
 from foundation.logger.loggering import server_logger as logger
-from fastapi import APIRouter, UploadFile, File, Form, HTTPException
+from fastapi import APIRouter, UploadFile, File, Form, HTTPException, Request
 from core.base.redis_duplicate_checker import RedisDuplicateChecker
 from foundation.trace.trace_context import TraceContext, auto_trace
 
@@ -30,10 +30,26 @@ workflow_manager = WorkflowManager(
 # Reuse workflow_manager's duplicate-checker instance for consistency
 duplicatechecker = workflow_manager.redis_duplicate_checker
 
+
+
 class FileUploadResponse(BaseModel):
     code: int
     data: dict
 
+def validate_upload_parameters(form_data) -> None:
+    """Validate request parameters"""
+    allowed_params = {'file', 'user'}  # only these two parameters are allowed
+
+    # Collect any parameters that are not allowed
+    extra_params = []
+    for key in form_data.keys():
+        if key not in allowed_params:
+            extra_params.append(key)
+
+    if extra_params:
+        logger.warning(f"Unsupported parameters detected: {extra_params}")
+        raise FileUploadErrors.invalid_parameters()
+
 def get_file_size(file: UploadFile) -> int:
     """获取文件大小的可靠同步方法(兼容 seek 仅支持单参数的情况)"""
     try:
@@ -71,23 +87,20 @@ def validate_file(file: UploadFile, file_content: bytes = None) -> None:
     logger.info(f"文件类型验证通过: {actual_file_type} (扩展名: {file_extension}, MIME: {file.content_type})")
 
 @file_upload_router.post("/file_upload", response_model=FileUploadResponse)
-@auto_trace(generate_if_missing=True)  # Do not look up parameters; generate an initial trace_id directly
+@auto_trace(generate_if_missing=True)  # @auto_trace normally takes callback_task_id, but it does not exist yet, so an initial trace_id stands in for now
 async def file_upload(
+    request: Request,
     file: List[UploadFile] = File([]),
-    callback_url: str = Form(None),
-    project_plan_type: str = Form(None),
     user: str = Form(None)
 ):
     """
     File upload endpoint
     """
     try:
-        # Validate the project plan type
-        valid_project_types = {
-            'bridge_up_part',  # bridge superstructure
-            'tunnel_construction',  # tunnel construction
-            'road_repair'  # road repair
-        }
+        # Validate the request parameters
+        form_data = await request.form()
+        validate_upload_parameters(form_data)
+
         valid_users = ast.literal_eval(config_handler.get("user_lists", "USERS"))
         
         # Validate the uploaded file
@@ -116,25 +129,16 @@ async def file_upload(
             raise FileUploadErrors.file_missing()
 
         # Enforce the file size limit
-        if file_size_mb > 30:  # file size must not exceed 30 MB
+        if file_size_mb > 50:  # file size must not exceed 50 MB
             raise FileUploadErrors.file_size_exceeded()
-        
-        # Validate the callback URL
-        if callback_url is '':
-            raise FileUploadErrors.callback_url_missing()
- 
+
         # Validate the user identifier
         if user is None or user not in valid_users:
             raise FileUploadErrors.invalid_user()
-        
-        # Project plan type check
-        if project_plan_type not in valid_project_types:
-            raise FileUploadErrors.project_plan_type_invalid()
 
         # Generate the file MD5 ID
         file_id = md5.md5_id(content)
-        if await duplicatechecker.is_duplicate_task(file_id):
-            raise FileUploadErrors.task_already_exists()
+
 
         created_at = int(time.time())
 
@@ -145,8 +149,6 @@ async def file_upload(
         logger.info(f"文件头信息: {content[:50] if 'content' in locals() else '未读取'}")
         logger.info(f"文件大小: {file_size_mb} MB")
         logger.info(f"========================", log_type="upload")
-        logger.info(f"请求参数 - 回调URL: {callback_url}\n, 工程类型: {project_plan_type}",
-                    log_type="upload")
         logger.info(f"用户标识: {user}")
 
         # Determine the file type
@@ -158,15 +160,11 @@ async def file_upload(
         else:
             file_type = 'unknown'
 
-
-        # Generate the callback task ID
-        #callback_task_id = f"{file_id}-{int(datetime.now().timestamp())}"
-        callback_task_id = "d0856b13c5328e732e9c590209554b76-1763369845"            
-
-        # Promote trace_id to the real callback_task_id
+        # Generate the task ID
+        callback_task_id = f"{file_id}-{int(datetime.now().timestamp())}"         
         TraceContext.set_trace_id(callback_task_id)
-        logger.info(f"Promoted trace_id to the real callback_task_id: {callback_task_id}")
-
+        logger.info(f"Set task trace_id: {callback_task_id}")
+        
         # Record the file info
         file_info = {
                 'file_id': file_id,
@@ -176,32 +174,46 @@ async def file_upload(
                 'callback_task_id': callback_task_id,
                 "file_name": file[0].filename,
                 "file_size": file_size_mb,
-                "project_plan_type": project_plan_type,
                 'updated_at': created_at
             }
 
+        # Cache the file info in Redis keyed by file_id, for the launch-review endpoint
         try:
-            # Submit the processing task to the workflow manager
-            await workflow_manager.submit_task_processing(file_info)
-            logger.info(f"Document processing task submitted, task ID: {callback_task_id}")
-
-
-
-            return FileUploadResponse(
-                code=200,
-                data={
-                    "id": file_info['file_id'],
-                    "name": file_info['file_name'],
-                    "size": file_size_mb,
-                    "created_at": created_at,
-                    "status": "processing",
-                    "callback_task_id": file_info['callback_task_id']
-                }
-            )
-
-        except Exception as workflow_error:
-            logger.error(f"Workflow submission failed: {str(workflow_error)}")
-            raise FileUploadErrors.internal_error(workflow_error)
+            from foundation.utils.redis_utils import store_file_info
+
+            # Store the file info keyed by file_id (1-hour expiry)
+            success = await store_file_info(file_id, file_info, 3600)
+            if success:
+                logger.info(f"File info cached in Redis: file_info:{file_id}")
+            else:
+                logger.warning("Failed to cache file info in Redis")
+
+        except Exception as e:
+            logger.warning(f"Failed to cache file info in Redis: {str(e)}")
+            # Non-fatal; continue the main flow
+
+        # try:
+            # # Submit the processing task to the workflow manager
+            # await workflow_manager.submit_task_processing(file_info)
+            # logger.info(f"Document processing task submitted, task ID: {callback_task_id}")
+
+
+
+        return FileUploadResponse(
+            code=200,
+            data={
+                "id": file_info['file_id'],
+                "name": file_info['file_name'],
+                "size": file_size_mb,
+                "created_at": created_at,
+                "status": "file_upload_success",
+                "callback_task_id": file_info['callback_task_id']
+            }
+        )
+
+        # except Exception as workflow_error:
+        #     logger.error(f"Workflow submission failed: {str(workflow_error)}")
+        #     raise FileUploadErrors.internal_error(workflow_error)
 
     except HTTPException:
         logger.error(f"HTTP异常: {traceback.format_exc()}")

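Client-side sketch of step 1 of the split flow (host, port, and the /sgsc prefix for this router are assumptions):

import requests

# Upload the document; only 'file' and 'user' are accepted now
with open("plan.pdf", "rb") as f:
    resp = requests.post(
        "http://127.0.0.1:8000/sgsc/file_upload",   # assumed local dev address
        files={"file": f},
        data={"user": "user-001"},
    )
upload = resp.json()["data"]
print(upload["callback_task_id"], upload["status"])  # -> ..., file_upload_success
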
+ 159 - 0
views/construction_review/launch_review.py

@@ -0,0 +1,159 @@
+"""
+Construction plan review launch endpoint
+Receives review configuration parameters and starts the AI review workflow
+"""
+
+import uuid
+import time
+from datetime import datetime
+from typing import List, Optional, Dict, Any
+from pydantic import BaseModel, Field
+from fastapi import APIRouter, HTTPException
+from core.base.redis_duplicate_checker import RedisDuplicateChecker
+from foundation.logger.loggering import server_logger as logger
+from foundation.trace.trace_context import TraceContext, auto_trace
+from foundation.utils.redis_utils import get_file_info, delete_file_info
+from core.base.workflow_manager import WorkflowManager
+from views.construction_review.file_upload import validate_upload_parameters
+from .schemas.error_schemas import LaunchReviewErrors
+
+launch_review_router = APIRouter(prefix="/sgsc", tags=["Launch review"])
+duplicatechecker = RedisDuplicateChecker()
+# Initialize the workflow manager
+workflow_manager = WorkflowManager(
+    max_concurrent_docs=3,
+    max_concurrent_reviews=5
+)
+
+
+class LaunchReviewRequest(BaseModel):
+    """启动审查请求模型"""
+    callback_task_id: str = Field(..., description="回调任务ID,从文件上传接口获取")
+    review_config: List[str] = Field(
+        ...,
+        description="审查配置列表,包含的项为启用状态"
+    )
+    project_plan_type: str = Field(
+        "bridge_up_part",
+        description="工程方案类型,当前仅支持 bridge_up_part"
+    )
+
+    class Config:
+        extra = "forbid"  # 禁止额外的字段
+
+
+class LaunchReviewResponse(BaseModel):
+    """启动审查响应模型"""
+    code: int
+    data: dict
+
+
+def validate_review_config(review_config: List[str]) -> None:
+    """Validate the review configuration"""
+    # review_config must not be empty
+    if not review_config or len(review_config) == 0:
+        raise LaunchReviewErrors.enum_type_cannot_be_null()
+
+    # Supported review item enum values
+    supported_review_items = {
+        'sensitive_word_check',       # wording and grammar check
+        'semantic_logic_check',       # semantic logic review
+        'completeness_check',         # clause completeness review
+        'timeliness_check',           # timeliness review
+        'reference_check',            # standards-reference review
+        'sensitive_words_check',      # sensitive word review
+        'mandatory_standards_check',  # mandatory standards check
+        'technical_parameters_check', # precise technical parameter check
+        'design_values_check'         # design value conformance check
+    }
+
+    # Reject unsupported review items
+    unsupported_items = set(review_config) - supported_review_items
+    if unsupported_items:
+        raise LaunchReviewErrors.enum_type_invalid()
+
+def validate_project_plan_type(project_plan_type: str) -> None:
+    """Validate the project plan type"""
+    # Currently supported project plan types
+    supported_types = {'bridge_up_part'}  # bridge superstructure
+
+    if project_plan_type not in supported_types:
+        raise LaunchReviewErrors.project_plan_type_invalid()
+
+
+@launch_review_router.post("/sse/launch_review", response_model=LaunchReviewResponse)
+@auto_trace(generate_if_missing=True)
+async def launch_review(request_data: LaunchReviewRequest):
+    """
+    Launch a construction plan review
+
+    Args:
+        request_data: launch review request parameters
+
+    Returns:
+        LaunchReviewResponse: response containing the task identifiers
+    """
+    try:
+        callback_task_id = request_data.callback_task_id
+        review_config = request_data.review_config
+        project_plan_type = request_data.project_plan_type
+
+        logger.info(f"Launch review request received: callback_task_id={callback_task_id}")
+
+        # Validate the review configuration
+        validate_review_config(review_config)
+
+        # Validate the project plan type
+        validate_project_plan_type(project_plan_type)
+
+        try:
+            # Extract file_id from callback_task_id (format: file_id-timestamp)
+            file_id = callback_task_id.rsplit('-', 1)[0] if '-' in callback_task_id else callback_task_id
+
+            # Check for a duplicate task
+            if await duplicatechecker.is_duplicate_task(file_id):
+                raise LaunchReviewErrors.task_already_exists()
+
+            # Fetch the file info (including the file content)
+            file_info = await get_file_info(file_id, include_content=True)
+
+            if not file_info:
+                raise LaunchReviewErrors.task_not_found()
+
+            # Verify the required fields are present
+            if 'file_content' not in file_info:
+                logger.error(f"file_content missing from file info; available fields: {list(file_info.keys())}")
+                raise LaunchReviewErrors.task_not_found()
+
+            # Attach the review configuration to the file info
+            file_info.update({
+                'review_config': review_config,
+                'project_plan_type': project_plan_type,
+                'launched_at': int(time.time())
+            })
+
+            logger.info(f"File info fetched: file_id={file_id}, fields: {list(file_info.keys())}")
+            logger.info(f"File content size: {len(file_info.get('file_content', b''))} bytes")
+
+            # Note: keep the Redis cache for now; the workflow cleans it up when it finishes
+            # await delete_file_info(file_id)
+            logger.info(f"Keeping Redis cache for the workflow: file_info:{file_id}")
+
+        except HTTPException:
+            # Let the validation errors raised above propagate unchanged
+            raise
+        except Exception as e:
+            logger.error(f"Failed to fetch file info: {str(e)}")
+            raise LaunchReviewErrors.file_info_not_found(e)
+
+        # Submit the processing task to the workflow manager
+        task_id = await workflow_manager.submit_task_processing(file_info)
+        logger.info(f"Review workflow submitted: task_id={task_id}")
+
+        # The original code fell through without a return; respond with the task
+        # identifiers (the exact data fields here are an assumption)
+        return LaunchReviewResponse(
+            code=200,
+            data={
+                "callback_task_id": callback_task_id,
+                "status": "review_started",
+                "launched_at": file_info['launched_at']
+            }
+        )
+
+    except HTTPException:
+        # Re-raise HTTP exceptions unchanged
+        raise
+    except Exception as e:
+        logger.error(f"Failed to launch review: {str(e)}")
+        raise LaunchReviewErrors.internal_error(e)
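
Client-side sketch of step 2 (assumed local address; the path is the /sgsc prefix plus /sse/launch_review):

import requests

resp = requests.post(
    "http://127.0.0.1:8000/sgsc/sse/launch_review",  # assumed local dev address
    json={
        "callback_task_id": "d0856b13c5328e732e9c590209554b76-1763369845",
        "review_config": ["semantic_logic_check", "completeness_check"],
        "project_plan_type": "bridge_up_part",
    },
)
print(resp.status_code, resp.json())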

+ 107 - 51
views/construction_review/schemas/error_schemas.py

@@ -42,16 +42,11 @@ class ErrorCodes:
     WJSC005 = {
         "code": "WJSC005",
         "error_type": "FILE_SIZE_EXCEEDED",
-        "message": "文件过大(最大不超过30MB)",
+        "message": "文件过大(最大不超过50MB)",
         "status_code": 400
     }
 
-    WJSC006 = {
-        "code": "WJSC006",
-        "error_type": "PROJECT_PLAN_TYPE_INVALID",
-        "message": "无效工程方案类型(未提供或未注册)",
-        "status_code": 400
-    }
+
 
     WJSC007 = {
         "code": "WJSC007",
@@ -69,17 +64,11 @@ class ErrorCodes:
 
     WJSC009 = {
         "code": "WJSC009",
-        "error_type": "CALLBACK_URL_MISS",
-        "message": "回调客户端地址缺失,请提供回调客户端地址",
-        "status_code": 403
+        "error_type": "INVALID_PARAMETERS",
+        "message": "请求参数无效或不支持",
+        "status_code": 400
     }
 
-    WJSC010 = {
-        "code": "WJSC010",
-        "error_type": "TASK_ALREADY_EXISTS",
-        "message": "任务已存在,请勿重复提交",
-        "status_code": 409
-    }
 
     WJSC011 = {
         "code": "WJSC011",
@@ -88,49 +77,88 @@ class ErrorCodes:
         "status_code": 500
     }
 
-    # Progress query endpoint error codes (JDLX001-JDLX006)
-    JDLX001 = {
-        "code": "JDLX001",
+
+
+    # Launch review endpoint error codes (QDSC001-QDSC011)
+    QDSC001 = {
+        "code": "QDSC001",
         "error_type": "MISSING_PARAMETERS",
         "message": "请求参数缺失",
         "status_code": 400
     }
 
-    JDLX002 = {
-        "code": "JDLX002",
+    QDSC002 = {
+        "code": "QDSC002",
         "error_type": "INVALID_PARAM_FORMAT",
         "message": "请求参数格式错误",
         "status_code": 400
     }
 
-    JDLX003 = {
-        "code": "JDLX003",
+    QDSC003 = {
+        "code": "QDSC003",
         "error_type": "UNAUTHORIZED",
         "message": "认证失败(未提供或无效的Authorization)",
         "status_code": 401
     }
 
-    JDLX004 = {
-        "code": "JDLX004",
+    QDSC004 = {
+        "code": "QDSC004",
         "error_type": "INVALID_USER",
         "message": "用户标识未提供或无效",
         "status_code": 403
     }
 
-    JDLX005 = {
-        "code": "JDLX005",
+    QDSC005 = {
+        "code": "QDSC005",
         "error_type": "TASK_NOT_FOUND",
         "message": "任务ID不存在或已过期",
         "status_code": 404
     }
 
-    JDLX006 = {
-        "code": "JDLX006",
+    
+    QDSC006 = {
+        "code": "QDSC006",
+        "error_type": "TASK_ALREADY_EXISTS",
+        "message": "任务已存在,请勿重复提交",
+        "status_code": 409
+    }
+
+    QDSC007 = {
+        "code": "QDSC007",
+        "error_type": "PROJECT_PLAN_TYPE_INVALID",
+        "message": "无效工程方案类型(未提供或未注册)",
+        "status_code": 400
+    }
+
+    QDSC008 = {
+        "code": "QDSC008",
+        "error_type": "ENUM_TYPE_INVALID",
+        "message": "审查枚举类型无效",
+        "status_code": 400
+    }
+
+    QDSC009 = {
+        "code": "QDSC009",
+        "error_type": "ENUM_TYPE_CANNOT_BE_NULL",
+        "message": "审查枚举类型不能为空",
+        "status_code": 400
+    }
+
+    QDSC010 = {
+        "code": "QDSC010",
+        "error_type": "FILE_INFO_NOT_FOUND",
+        "message": "文件信息获取失败",
+        "status_code": 500
+    }
+
+    QDSC011 = {
+        "code": "QDSC011",
         "error_type": "SERVER_INTERNAL_ERROR",
         "message": "服务端内部错误",
         "status_code": 500
     }
 
+
     # Review results endpoint error codes (SCJG001-SCJG008)
     SCJG001 = {
         "code": "SCJG001",
@@ -217,7 +245,7 @@ def create_server_error(error_code: str, original_error: Exception) -> HTTPExcep
     Create an internal server error exception
 
     Args:
-        error_code: error code (e.g. "WJSC008", "JDLX006", "SCJG008")
+        error_code: error code (e.g. "WJSC008", "QDSC006", "SCJG008")
         original_error: 原始异常
 
     Returns:
@@ -225,7 +253,7 @@ def create_server_error(error_code: str, original_error: Exception) -> HTTPExcep
     """
     error_map = {
         "WJSC011": ErrorCodes.WJSC011,
-        "JDLX006": ErrorCodes.JDLX006,
+        "QDSC006": ErrorCodes.QDSC006,
         "SCJG008": ErrorCodes.SCJG008
     }
 
@@ -279,10 +307,6 @@ class FileUploadErrors:
         logger.error(ErrorCodes.WJSC008)
         return create_http_exception(ErrorCodes.WJSC008)
     
-    @staticmethod
-    def callback_url_missing():
-        logger.error(ErrorCodes.WJSC009)
-        return create_http_exception(ErrorCodes.WJSC009)
 
 
     @staticmethod
@@ -291,44 +315,74 @@ class FileUploadErrors:
         return create_http_exception(ErrorCodes.WJSC010)
 
     
+    @staticmethod
+    def invalid_parameters():
+        logger.error(ErrorCodes.WJSC009)
+        return create_http_exception(ErrorCodes.WJSC009)
+
     @staticmethod
     def internal_error(original_error: Exception):
         logger.error(ErrorCodes.WJSC011)
         return create_server_error("WJSC011", original_error)
 
 
-class TaskProgressErrors:
-    """进度查询接口错误"""
+class LaunchReviewErrors:
+    """启动审查接口错误"""
 
     @staticmethod
     def missing_parameters():
-        logger.error(ErrorCodes.JDLX001)
-        return create_http_exception(ErrorCodes.JDLX001)
+        logger.error(ErrorCodes.QDSC001)
+        return create_http_exception(ErrorCodes.QDSC001)
 
     @staticmethod
     def invalid_param_format():
-        logger.error(ErrorCodes.JDLX002)
-        return create_http_exception(ErrorCodes.JDLX002)
+        logger.error(ErrorCodes.QDSC002)
+        return create_http_exception(ErrorCodes.QDSC002)
 
     @staticmethod
     def unauthorized():
-        logger.error(ErrorCodes.JDLX003)
-        return create_http_exception(ErrorCodes.JDLX003)
+        logger.error(ErrorCodes.QDSC003)
+        return create_http_exception(ErrorCodes.QDSC003)
 
     @staticmethod
     def invalid_user():
-        logger.error(ErrorCodes.JDLX004)
-        return create_http_exception(ErrorCodes.JDLX004)
+        logger.error(ErrorCodes.QDSC004)
+        return create_http_exception(ErrorCodes.QDSC004)
 
     @staticmethod
     def task_not_found():
-        logger.error(ErrorCodes.JDLX005)
-        return create_http_exception(ErrorCodes.JDLX005)
+        logger.error(ErrorCodes.QDSC005)
+        return create_http_exception(ErrorCodes.QDSC005)
+
+    @staticmethod
+    def task_already_exists():
+        logger.error(ErrorCodes.QDSC006)
+        return create_http_exception(ErrorCodes.QDSC006)
+
+    @staticmethod
+    def project_plan_type_invalid():
+        logger.error(ErrorCodes.QDSC007)
+        return create_http_exception(ErrorCodes.QDSC007)
+
+    @staticmethod
+    def enum_type_invalid():
+        logger.error(ErrorCodes.QDSC008)
+        return create_http_exception(ErrorCodes.QDSC008)
 
     @staticmethod
-    def server_internal_error(original_error: Exception):
-        logger.error(ErrorCodes.JDLX006, original_error)
-        return create_server_error("JDLX006", original_error)
+    def enum_type_cannot_be_null():
+        logger.error(ErrorCodes.QDSC009)
+        return create_http_exception(ErrorCodes.QDSC009)
+
+    @staticmethod
+    def file_info_not_found(original_error: Exception):
+        logger.error(ErrorCodes.QDSC010)
+        return create_server_error("QDSC010", original_error)
+
+    @staticmethod
+    def internal_error(original_error: Exception):
+        logger.error(ErrorCodes.QDSC011)
+        return create_server_error("QDSC011", original_error)
 
 
 class ReviewResultsErrors:
@@ -372,4 +426,6 @@ class ReviewResultsErrors:
     @staticmethod
     def server_error(original_error: Exception):
         logger.error(ErrorCodes.SCJG008)
-        return create_server_error("SCJG008", original_error)
+        return create_server_error("SCJG008", original_error)
+
+
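
All ErrorCodes entries share one shape, so the factory helpers referenced above stay tiny; an illustrative sketch of what create_http_exception presumably does (the real implementation sits earlier in this file, outside the diff):

from fastapi import HTTPException

def create_http_exception_sketch(error: dict) -> HTTPException:
    """Map an ErrorCodes-style dict onto an HTTPException (illustration only)."""
    return HTTPException(
        status_code=error["status_code"],
        detail={
            "code": error["code"],
            "error_type": error["error_type"],
            "message": error["message"],
        },
    )

# raise create_http_exception_sketch(ErrorCodes.QDSC006)  # -> 409 TASK_ALREADY_EXISTS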

+ 3 - 3
views/construction_review/task_progress.py

@@ -8,7 +8,7 @@ from typing import Dict
 from datetime import datetime
 from pydantic import BaseModel
 from fastapi import APIRouter, Query
-from .schemas.error_schemas import TaskProgressErrors
+from .schemas.error_schemas import LaunchReviewErrors
 from fastapi.responses import StreamingResponse
 from foundation.logger.loggering import server_logger as logger
 from foundation.trace.trace_context import TraceContext, auto_trace
@@ -91,7 +91,7 @@ async def sse_progress_stream(
     try:
         valid_users = {"user-001", "user-002", "user-003"}
         if user not in valid_users:
-            raise TaskProgressErrors.invalid_user()
+            raise LaunchReviewErrors.invalid_user()
         sse_callback_manager.register_callback(callback_task_id, sse_progress_callback)
 
         queue = await sse_manager.connect(callback_task_id)
@@ -179,7 +179,7 @@ async def sse_progress_stream(
 
     except Exception as e:
         logger.error(f"SSE连接失败: {callback_task_id}, {e}")
-        raise TaskProgressErrors.server_internal_error(e)
+        raise LaunchReviewErrors.internal_error(e)
 
 
 @task_progress_router.get("/sse/status")

Not all files can be shown because too many files changed in this diff