test_views.py 26 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687
#!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. '''
  4. @Project : lq-agent-api
  5. @File :cattle_farm_views.py
  6. @IDE :PyCharm
  7. @Author :
  8. @Date :2025/7/10 17:32
  9. '''
  10. import json
  11. from typing import Optional
  12. from fastapi import Depends, Response, Header
  13. from sse_starlette import EventSourceResponse
  14. from starlette.responses import JSONResponse
  15. from foundation.agent.test_agent import test_agent_client
  16. from foundation.agent.generate.model_generate import test_generate_model_client
  17. from foundation.logger.loggering import server_logger
  18. from foundation.schemas.test_schemas import TestForm
  19. from foundation.utils.common import return_json, handler_err
  20. from views import test_router, get_operation_id
  21. from foundation.agent.workflow.test_workflow_graph import test_workflow_graph
  22. from file_processors.pdf_processor import PDFProcessor
  23. from file_processors.bfp_pdf_processor import BfpPDFProcessor
  24. from foundation.models.silicon_flow import SiliconFlowAPI
  25. from foundation.rag.vector.pg_vector_mananger import PGVectorManager
  26. from foundation.rag.vector.pg_vector import PGVectorDB
  27. @test_router.post("/generate/chat", response_model=TestForm)
  28. async def generate_chat_endpoint(
  29. param: TestForm,
  30. trace_id: str = Depends(get_operation_id)):
  31. """
  32. 生成类模型
  33. """
  34. try:
  35. server_logger.info(trace_id=trace_id, msg=f"{param}")
  36. print(trace_id)
  37. # 从字典中获取input
  38. input_query = param.input
  39. session_id = param.config.session_id
  40. context = param.context
  41. header_info = {
  42. }
  43. task_prompt_info = {"task_prompt": ""}
  44. output = test_generate_model_client.get_model_generate_invoke(trace_id , task_prompt_info,
  45. input_query, context)
  46. # 直接执行
  47. server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  48. # 返回字典格式的响应
  49. return JSONResponse(
  50. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  51. except ValueError as err:
  52. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  53. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  54. except Exception as err:
  55. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  56. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  57. @test_router.post("/generate/stream", response_model=TestForm)
  58. async def generate_stream_endpoint(
  59. param: TestForm,
  60. trace_id: str = Depends(get_operation_id)):
  61. """
  62. 生成类模型
  63. """
  64. try:
  65. server_logger.info(trace_id=trace_id, msg=f"{param}")
  66. # 从字典中获取input
  67. input_query = param.input
  68. session_id = param.config.session_id
  69. context = param.context
  70. header_info = {
  71. }
  72. task_prompt_info = {"task_prompt": ""}
  73. # 创建 SSE 流式响应
  74. async def event_generator():
  75. try:
  76. # 流式处理查询 trace_id, task_prompt_info: dict, input_query, context=None
  77. for chunk in test_generate_model_client.get_model_generate_stream(trace_id , task_prompt_info,
  78. input_query, context):
  79. # 发送数据块
  80. yield {
  81. "event": "message",
  82. "data": json.dumps({
  83. "output": chunk,
  84. "completed": False,
  85. }, ensure_ascii=False)
  86. }
  87. # 获取缓存数据
  88. result_data = {}
  89. # 发送结束事件
  90. yield {
  91. "event": "message_end",
  92. "data": json.dumps({
  93. "completed": True,
  94. "message": json.dumps(result_data, ensure_ascii=False),
  95. "code": 0,
  96. "trace_id": trace_id,
  97. }, ensure_ascii=False),
  98. }
  99. except Exception as e:
  100. # 错误处理
  101. yield {
  102. "event": "error",
  103. "data": json.dumps({
  104. "trace_id": trace_id,
  105. "message": str(e),
  106. "code": 1
  107. }, ensure_ascii=False)
  108. }
  109. finally:
  110. # 不需要关闭客户端,因为它是单例
  111. pass
  112. # 返回 SSE 响应
  113. return EventSourceResponse(
  114. event_generator(),
  115. headers={
  116. "Cache-Control": "no-cache",
  117. "Connection": "keep-alive"
  118. }
  119. )
  120. except ValueError as err:
  121. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  122. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  123. except Exception as err:
  124. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  125. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  126. # 路由
  127. @test_router.post("/agent/chat", response_model=TestForm)
  128. async def chat_endpoint(
  129. param: TestForm,
  130. trace_id: str = Depends(get_operation_id)):
  131. """
  132. 根据场景获取智能体反馈
  133. """
  134. try:
  135. server_logger.info(trace_id=trace_id, msg=f"{param}")
  136. # 验证参数
  137. # 从字典中获取input
  138. input_data = param.input
  139. session_id = param.config.session_id
  140. context = param.context
  141. header_info = {
  142. }
  143. task_prompt_info = {"task_prompt": ""}
  144. # stream 流式执行
  145. output = await test_agent_client.handle_query(trace_id , task_prompt_info, input_data, context, param.config)
  146. # 直接执行
  147. server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  148. # 返回字典格式的响应
  149. return JSONResponse(
  150. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  151. except ValueError as err:
  152. handler_err(server_logger, trace_id=trace_id, err=err, err_name="agent/chat")
  153. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  154. except Exception as err:
  155. handler_err(server_logger, trace_id=trace_id, err=err, err_name="agent/chat")
  156. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  157. @test_router.post("/agent/stream", response_class=Response)
  158. async def chat_agent_stream(param: TestForm,
  159. trace_id: str = Depends(get_operation_id)):
  160. """
  161. 根据场景获取智能体反馈 (SSE流式响应)
  162. """
  163. try:
  164. server_logger.info(trace_id=trace_id, msg=f"{param}")
  165. # 提取参数
  166. input_data = param.input
  167. context = param.context
  168. header_info = {
  169. }
  170. task_prompt_info = {"task_prompt": ""}
  171. # 如果business_scene为None,则使用大模型进行意图识别
  172. server_logger.info(trace_id=trace_id, msg=f"{param}")
  173. # 创建 SSE 流式响应
  174. async def event_generator():
  175. try:
  176. # 流式处理查询
  177. async for chunk in test_agent_client.handle_query_stream(
  178. trace_id=trace_id,
  179. config_param=param.config,
  180. task_prompt_info=task_prompt_info,
  181. input_query=input_data,
  182. context=context,
  183. header_info=header_info
  184. ):
  185. server_logger.debug(trace_id=trace_id, msg=f"{chunk}")
  186. # 发送数据块
  187. yield {
  188. "event": "message",
  189. "data": json.dumps({
  190. "code": 0,
  191. "output": chunk,
  192. "completed": False,
  193. "trace_id": trace_id,
  194. }, ensure_ascii=False)
  195. }
  196. # 获取缓存数据
  197. result_data = {}
  198. # 发送结束事件
  199. yield {
  200. "event": "message_end",
  201. "data": json.dumps({
  202. "completed": True,
  203. "message": json.dumps(result_data, ensure_ascii=False),
  204. "code": 0,
  205. "trace_id": trace_id,
  206. }, ensure_ascii=False),
  207. }
  208. except Exception as e:
  209. # 错误处理
  210. yield {
  211. "event": "error",
  212. "data": json.dumps({
  213. "trace_id": trace_id,
  214. "message": str(e),
  215. "code": 1
  216. }, ensure_ascii=False)
  217. }
  218. finally:
  219. # 不需要关闭客户端,因为它是单例
  220. pass
  221. # 返回 SSE 响应
  222. return EventSourceResponse(
  223. event_generator(),
  224. headers={
  225. "Cache-Control": "no-cache",
  226. "Connection": "keep-alive"
  227. }
  228. )
  229. except Exception as err:
  230. # 初始错误处理
  231. handler_err(server_logger, trace_id=trace_id, err=err, err_name="agent/stream")
  232. return JSONResponse(
  233. return_json(code=1, msg=f"{err}", trace_id=trace_id),
  234. status_code=500
  235. )
  236. @test_router.post("/graph/stream", response_class=Response)
  237. async def chat_graph_stream(param: TestForm,
  238. trace_id: str = Depends(get_operation_id)):
  239. """
  240. 根据场景获取智能体反馈 (SSE流式响应)
  241. """
  242. try:
  243. server_logger.info(trace_id=trace_id, msg=f"{param}")
  244. # request_param = {
  245. # "input": param.input,
  246. # "config": param.config,
  247. # "context": param.context
  248. # }
  249. # 创建 SSE 流式响应
  250. async def event_generator():
  251. try:
  252. # 流式处理查询
  253. async for chunk in test_workflow_graph.handle_query_stream(
  254. param=param,
  255. trace_id=trace_id,
  256. ):
  257. server_logger.debug(trace_id=trace_id, msg=f"{chunk}")
  258. # 发送数据块
  259. yield {
  260. "event": "message",
  261. "data": json.dumps({
  262. "code": 0,
  263. "output": chunk,
  264. "completed": False,
  265. "trace_id": trace_id,
  266. "dataType": "text"
  267. }, ensure_ascii=False)
  268. }
  269. # 发送结束事件
  270. yield {
  271. "event": "message_end",
  272. "data": json.dumps({
  273. "completed": True,
  274. "message": "Stream completed",
  275. "code": 0,
  276. "trace_id": trace_id,
  277. "dataType": "text"
  278. }, ensure_ascii=False),
  279. }
  280. except Exception as e:
  281. # 错误处理
  282. yield {
  283. "event": "error",
  284. "data": json.dumps({
  285. "trace_id": trace_id,
  286. "msg": str(e),
  287. "code": 1,
  288. "dataType": "text"
  289. }, ensure_ascii=False)
  290. }
  291. finally:
  292. # 不需要关闭客户端,因为它是单例
  293. pass
  294. # 返回 SSE 响应
  295. return EventSourceResponse(
  296. event_generator(),
  297. headers={
  298. "Cache-Control": "no-cache",
  299. "Connection": "keep-alive"
  300. }
  301. )
  302. except Exception as err:
  303. # 初始错误处理
  304. handler_err(server_logger, trace_id=trace_id, err=err, err_name="graph/stream")
  305. return JSONResponse(
  306. return_json(code=1, msg=f"{err}", trace_id=trace_id),
  307. status_code=500
  308. )
  309. @test_router.post("/data/governance", response_model=TestForm)
  310. async def generate_chat_endpoint(
  311. param: TestForm,
  312. trace_id: str = Depends(get_operation_id)):
  313. """
  314. 生成类模型
  315. """
  316. try:
  317. server_logger.info(trace_id=trace_id, msg=f"{param}")
  318. print(trace_id)
  319. # 从字典中获取input
  320. input_query = param.input
  321. session_id = param.config.session_id
  322. context = param.context
  323. header_info = {
  324. }
  325. task_prompt_info = {"task_prompt": ""}
  326. output = test_generate_model_client.get_model_data_governance_invoke(trace_id , task_prompt_info,
  327. input_query, context)
  328. # 直接执行
  329. server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  330. # 返回字典格式的响应
  331. return JSONResponse(
  332. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  333. except ValueError as err:
  334. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  335. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  336. except Exception as err:
  337. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  338. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  339. @test_router.post("/data/pdf/governance", response_model=TestForm)
  340. async def pdf_governance_endpoint(
  341. param: TestForm,
  342. trace_id: str = Depends(get_operation_id)):
  343. """
  344. 生成类模型
  345. """
  346. try:
  347. server_logger.info(trace_id=trace_id, msg=f"{param}")
  348. print(trace_id)
  349. # 从字典中获取input
  350. input_query = param.input
  351. session_id = param.config.session_id
  352. context = param.context
  353. header_info = {
  354. }
  355. task_prompt_info = {"task_prompt": ""}
  356. #file_directory= "I:/wangxun_dev_workspace/lq_workspace/LQDataGovernance/test/pdf_files"
  357. file_directory= "test/pdf_files"
  358. # 初始化知识问答处理
  359. pdf_processor = PDFProcessor(directory=file_directory)
  360. file_data = pdf_processor.process_pdfs_group()
  361. server_logger.info(trace_id=trace_id, msg=f"【result】: {file_data}", log_type="agent/chat")
  362. output = None
  363. #output = test_generate_model_client.get_model_data_governance_invoke(trace_id , task_prompt_info, input_query, context)
  364. # 直接执行
  365. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  366. # 返回字典格式的响应
  367. return JSONResponse(
  368. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  369. except ValueError as err:
  370. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  371. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  372. except Exception as err:
  373. handler_err(server_logger, trace_id=trace_id, err=err, err_name="generate/stream")
  374. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  375. @test_router.post("/data/bfp/governance", response_model=TestForm)
  376. async def bfp_governance_endpoint(
  377. param: TestForm,
  378. trace_id: str = Depends(get_operation_id)):
  379. """
  380. 编制依据文档切分处理
  381. """
  382. try:
  383. server_logger.info(trace_id=trace_id, msg=f"{param}")
  384. print(trace_id)
  385. # 从字典中获取input
  386. input_query = param.input
  387. session_id = param.config.session_id
  388. context = param.context
  389. header_info = {
  390. }
  391. task_prompt_info = {"task_prompt": ""}
  392. #file_directory= "I:/wangxun_dev_workspace/lq_workspace/LQDataGovernance/test/pdf_files"
  393. file_directory= "test/bfp_files"
  394. # 初始化知识问答处理
  395. pdf_processor = BfpPDFProcessor(directory=file_directory)
  396. file_data = pdf_processor.process_pdfs_group()
  397. server_logger.info(trace_id=trace_id, msg=f"【result】: ", log_type="bfp/governance")
  398. output = None
  399. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  400. # 返回字典格式的响应
  401. return JSONResponse(
  402. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  403. except ValueError as err:
  404. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  405. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  406. except Exception as err:
  407. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  408. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  409. @test_router.post("/data/embedding/test", response_model=TestForm)
  410. async def embedding_test_endpoint(
  411. param: TestForm,
  412. trace_id: str = Depends(get_operation_id)):
  413. """
  414. embedding模型测试
  415. """
  416. try:
  417. server_logger.info(trace_id=trace_id, msg=f"{param}")
  418. print(trace_id)
  419. # 从字典中获取input
  420. input_query = param.input
  421. session_id = param.config.session_id
  422. context = param.context
  423. header_info = {
  424. }
  425. task_prompt_info = {"task_prompt": ""}
  426. text = input_query
  427. # 初始化客户端(需提前设置环境变量 SILICONFLOW_API_KEY)
  428. from foundation.models.silicon_flow import SiliconFlowAPI
  429. base_api_platform = SiliconFlowAPI()
  430. embedding = base_api_platform.get_embeddings([text])[0]
  431. embed_dim = len(embedding)
  432. server_logger.info(trace_id=trace_id, msg=f"【result】: {embed_dim}")
  433. output = f"embed_dim={embed_dim},embedding:{embedding}"
  434. #output = test_generate_model_client.get_model_data_governance_invoke(trace_id , task_prompt_info, input_query, context)
  435. # 直接执行
  436. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="embedding")
  437. # 返回字典格式的响应
  438. return JSONResponse(
  439. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  440. except ValueError as err:
  441. handler_err(server_logger, trace_id=trace_id, err=err, err_name="embedding")
  442. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  443. except Exception as err:
  444. handler_err(server_logger, trace_id=trace_id, err=err, err_name="embedding")
  445. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  446. @test_router.post("/data/pgvector/test", response_model=TestForm)
  447. async def pgvector_test_endpoint(
  448. param: TestForm,
  449. trace_id: str = Depends(get_operation_id)):
  450. """
  451. pg_vector 向量检索测试
  452. """
  453. try:
  454. server_logger.info(trace_id=trace_id, msg=f"{param}")
  455. print(trace_id)
  456. # 从字典中获取input
  457. input_query = param.input
  458. session_id = param.config.session_id
  459. context = param.context
  460. header_info = {
  461. }
  462. task_prompt_info = {"task_prompt": ""}
  463. output = "success"
  464. # 初始化客户端(需提前设置环境变量 SILICONFLOW_API_KEY)
  465. client = SiliconFlowAPI()
  466. # 初始化数据库管理器
  467. # 1、原始测试
  468. # db_manager = PGVectorManager(client)
  469. # db_manager.db_test(query_text=input_query)
  470. # 2、抽象测试
  471. pg_vector_db = PGVectorDB(base_api_platform=client)
  472. output = pg_vector_db.similarity_cosine_search(param={"table_name": "test_documents"}, query_text=input_query)
  473. # 直接执行
  474. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  475. # 返回字典格式的响应
  476. return JSONResponse(
  477. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  478. except ValueError as err:
  479. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  480. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  481. except Exception as err:
  482. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  483. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  484. @test_router.post("/data/bfp/indb", response_model=TestForm)
  485. async def bfp_indb_endpoint(
  486. param: TestForm,
  487. trace_id: str = Depends(get_operation_id)):
  488. """
  489. 编制依据文档切分处理 和 入库处理
  490. """
  491. try:
  492. server_logger.info(trace_id=trace_id, msg=f"{param}")
  493. print(trace_id)
  494. # 从字典中获取input
  495. input_query = param.input
  496. session_id = param.config.session_id
  497. context = param.context
  498. header_info = {
  499. }
  500. task_prompt_info = {"task_prompt": ""}
  501. #file_directory= "I:/wangxun_dev_workspace/lq_workspace/LQDataGovernance/test/pdf_files"
  502. file_directory= "test/bfp_files"
  503. # 初始化知识问答处理
  504. pdf_processor = BfpPDFProcessor(directory=file_directory)
  505. file_data_list , total_chunks = pdf_processor.get_pdfs_group_data()
  506. server_logger.info(trace_id=trace_id, msg=f"【result】: ", log_type="bfp/governance")
  507. output = None
  508. # 初始化客户端(需提前设置环境变量 SILICONFLOW_API_KEY)
  509. client = SiliconFlowAPI()
  510. # 抽象测试
  511. pg_vector_db = PGVectorDB(base_api_platform=client)
  512. for file_data in file_data_list:
  513. #file_data = file_data[0:5]
  514. # 数据标准化处理
  515. documents = pg_vector_db.document_standard(file_data)
  516. pg_vector_db.add_tqdm_batch_documents(param={"table_name": "tv_basis_of_preparation"}, documents=documents)
  517. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  518. # 返回字典格式的响应
  519. return JSONResponse(
  520. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  521. except ValueError as err:
  522. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  523. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  524. except Exception as err:
  525. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/governance")
  526. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  527. @test_router.post("/data/bfp/batch/indb", response_model=TestForm)
  528. async def bfp_batch_indb_endpoint(
  529. param: TestForm,
  530. trace_id: str = Depends(get_operation_id)):
  531. """
  532. 编制依据文档 批量切分和入库处理
  533. """
  534. try:
  535. server_logger.info(trace_id=trace_id, msg=f"{param}")
  536. # 从字典中获取input
  537. input_query = param.input
  538. session_id = param.config.session_id
  539. context = param.context
  540. header_info = {
  541. }
  542. # 初始化客户端(需提前设置环境变量 SILICONFLOW_API_KEY)
  543. client = SiliconFlowAPI()
  544. # 抽象测试
  545. pg_vector_db = PGVectorDB(base_api_platform=client)
  546. #file_directory= "I:/wangxun_dev_workspace/lq_workspace/LQDataGovernance/test/pdf_files"
  547. file_directory= "test/bfp_files"
  548. # 初始化知识问答处理
  549. pdf_processor = BfpPDFProcessor(directory=file_directory, base_vector=pg_vector_db)
  550. pdf_processor.process_tqdm_pdfs_group()
  551. server_logger.info(trace_id=trace_id, msg=f"【result】: ", log_type="bfp/batch/indb")
  552. output = "success"
  553. #server_logger.debug(trace_id=trace_id, msg=f"【result】: {output}", log_type="agent/chat")
  554. # 返回字典格式的响应
  555. return JSONResponse(
  556. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  557. except ValueError as err:
  558. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/batch/indb")
  559. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  560. except Exception as err:
  561. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/batch/indb")
  562. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  563. @test_router.post("/data/bfp/search", response_model=TestForm)
  564. async def bfp_search_endpoint(
  565. param: TestForm,
  566. trace_id: str = Depends(get_operation_id)):
  567. """
  568. 编制依据文档切分处理 和 入库处理
  569. """
  570. try:
  571. server_logger.info(trace_id=trace_id, msg=f"{param}")
  572. print(trace_id)
  573. # 从字典中获取input
  574. input_query = param.input
  575. session_id = param.config.session_id
  576. context = param.context
  577. header_info = {
  578. }
  579. task_prompt_info = {"task_prompt": ""}
  580. top_k = int(session_id)
  581. output = None
  582. # 初始化客户端(需提前设置环境变量 SILICONFLOW_API_KEY)
  583. client = SiliconFlowAPI()
  584. # 抽象测试
  585. pg_vector_db = PGVectorDB(base_api_platform=client)
  586. output = pg_vector_db.retriever(param={"table_name": "tv_basis_of_preparation"}, query_text=input_query , top_k=top_k)
  587. # 返回字典格式的响应
  588. return JSONResponse(
  589. return_json(data={"output": output}, data_type="text", trace_id=trace_id))
  590. except ValueError as err:
  591. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/search")
  592. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))
  593. except Exception as err:
  594. handler_err(server_logger, trace_id=trace_id, err=err, err_name="bfp/search")
  595. return JSONResponse(return_json(code=100500, msg=f"{err}", trace_id=trace_id))