"""
Celery application configuration.

Responsible for task-queue management only; no business logic lives here.
"""
import os
from celery import Celery
from .config import config_handler
# Trace system hook for Celery workers
from foundation.trace.celery_trace import init

# Read Redis connection settings from the config file (with defaults)
redis_host = config_handler.get('redis', 'REDIS_HOST', 'localhost')
redis_port = config_handler.get('redis', 'REDIS_PORT', '6379')
redis_password = config_handler.get('redis', 'REDIS_PASSWORD', '')
redis_db = config_handler.get('redis', 'REDIS_DB', '0')

# Build the Redis connection URL; include the password segment only when set
if redis_password:
    redis_url = f"redis://:{redis_password}@{redis_host}:{redis_port}/{redis_db}"
else:
    redis_url = f"redis://{redis_host}:{redis_port}/{redis_db}"

print(f"Connecting to Redis: {redis_url}")

app = Celery(
    'workflow_tasks',
    broker=redis_url,
    backend=redis_url,
    include=['foundation.base.tasks']
)

# Celery configuration
app.conf.update(
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='Asia/Shanghai',
    enable_utc=True,
    # Worker settings
    worker_prefetch_multiplier=2,  # prefetch up to 2 tasks per worker process
    task_acks_late=True,           # acknowledge only after the task completes
    # Concurrency control
    worker_concurrency=2,   # NOTE(review): the 'solo' pool below ignores this — confirm intent
    worker_pool='solo',     # single-threaded pool (avoids GIL contention)
    # Network/connection settings — guard against 30-minute disconnects
    broker_connection_timeout=30,             # connection timeout: 30 seconds
    broker_connection_retry=True,             # retry lost broker connections
    broker_connection_retry_on_startup=True,  # also retry during startup
    broker_connection_max_retries=10,         # cap on retry attempts
    broker_heartbeat=60,                      # heartbeat every 60s (2x the 30s default)
    broker_transport_options={
        'visibility_timeout': 3600,  # seconds before an unacked task is redelivered
        'socket_keepalive': True,    # keep idle sockets alive
    },
    # Task settings
    task_track_started=True,
    task_time_limit=600,           # hard limit: 10 minutes (document processing is slow)
    task_soft_time_limit=540,      # soft limit: 9 minutes
    worker_max_tasks_per_child=5,  # recycle each worker after 5 tasks (guards against memory leaks)
    # Result expiry
    result_expires=3600,  # results expire after 1 hour
    # Connection-pool settings
    broker_pool_limit=None,          # no limit on the broker connection pool
    result_backend_pool_limit=None,  # no limit on the result-backend pool
)

# Initialize the Celery trace system
init()